sha
stringlengths
40
40
text
stringlengths
1
13.4M
id
stringlengths
2
117
tags
listlengths
1
7.91k
created_at
stringlengths
25
25
metadata
stringlengths
2
875k
last_modified
stringlengths
25
25
arxiv
listlengths
0
25
languages
listlengths
0
7.91k
tags_str
stringlengths
17
159k
text_str
stringlengths
1
447k
text_lists
listlengths
0
352
processed_texts
listlengths
1
353
tokens_length
listlengths
1
353
input_texts
listlengths
1
40
embeddings
listlengths
768
768
629cedfb69fb9b702a759a43fc6de0193e190cb5
# Dataset Card for Evaluation run of TeeZee/Buttocks-7B-v1.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [TeeZee/Buttocks-7B-v1.1](https://huggingface.co/TeeZee/Buttocks-7B-v1.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TeeZee__Buttocks-7B-v1.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T01:11:42.327432](https://huggingface.co/datasets/open-llm-leaderboard/details_TeeZee__Buttocks-7B-v1.1/blob/main/results_2024-01-25T01-11-42.327432.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.49958748050002727, "acc_stderr": 0.03449947558483939, "acc_norm": 0.5072913093747228, "acc_norm_stderr": 0.03532795103647748, "mc1": 0.2962056303549572, "mc1_stderr": 0.015983595101811392, "mc2": 0.4472415883922134, "mc2_stderr": 0.015128282783775687 }, "harness|arc:challenge|25": { "acc": 0.5213310580204779, "acc_stderr": 0.014598087973127106, "acc_norm": 0.5460750853242321, "acc_norm_stderr": 0.01454922110517187 }, "harness|hellaswag|10": { "acc": 0.578868751244772, "acc_stderr": 0.004927314729433553, "acc_norm": 0.7561242780322645, "acc_norm_stderr": 0.004285410130466104 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.48148148148148145, "acc_stderr": 0.043163785995113245, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.043163785995113245 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.4342105263157895, "acc_stderr": 0.040335656678483184, "acc_norm": 0.4342105263157895, "acc_norm_stderr": 0.040335656678483184 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5358490566037736, "acc_stderr": 0.030693675018458003, "acc_norm": 0.5358490566037736, "acc_norm_stderr": 0.030693675018458003 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5347222222222222, "acc_stderr": 0.04171115858181618, "acc_norm": 0.5347222222222222, "acc_norm_stderr": 0.04171115858181618 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, 
"acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4508670520231214, "acc_stderr": 0.03794012674697029, "acc_norm": 0.4508670520231214, "acc_norm_stderr": 0.03794012674697029 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3431372549019608, "acc_stderr": 0.04724007352383888, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.04724007352383888 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4595744680851064, "acc_stderr": 0.03257901482099835, "acc_norm": 0.4595744680851064, "acc_norm_stderr": 0.03257901482099835 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.35964912280701755, "acc_stderr": 0.04514496132873633, "acc_norm": 0.35964912280701755, "acc_norm_stderr": 0.04514496132873633 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.45517241379310347, "acc_stderr": 0.04149886942192117, "acc_norm": 0.45517241379310347, "acc_norm_stderr": 0.04149886942192117 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.36243386243386244, "acc_stderr": 0.024757473902752035, "acc_norm": 0.36243386243386244, "acc_norm_stderr": 0.024757473902752035 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2698412698412698, "acc_stderr": 0.03970158273235172, "acc_norm": 0.2698412698412698, "acc_norm_stderr": 0.03970158273235172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6064516129032258, "acc_stderr": 0.027791878753132267, "acc_norm": 0.6064516129032258, "acc_norm_stderr": 0.027791878753132267 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.39408866995073893, "acc_stderr": 0.034381579670365446, "acc_norm": 0.39408866995073893, "acc_norm_stderr": 0.034381579670365446 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6303030303030303, "acc_stderr": 0.03769430314512568, "acc_norm": 0.6303030303030303, "acc_norm_stderr": 0.03769430314512568 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6565656565656566, "acc_stderr": 0.03383201223244441, "acc_norm": 0.6565656565656566, "acc_norm_stderr": 0.03383201223244441 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.689119170984456, "acc_stderr": 0.03340361906276586, "acc_norm": 0.689119170984456, "acc_norm_stderr": 0.03340361906276586 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4846153846153846, "acc_stderr": 0.025339003010106522, "acc_norm": 0.4846153846153846, "acc_norm_stderr": 0.025339003010106522 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.28888888888888886, "acc_stderr": 0.027634907264178544, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.027634907264178544 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5378151260504201, "acc_stderr": 0.03238546948758979, "acc_norm": 0.5378151260504201, "acc_norm_stderr": 0.03238546948758979 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.6990825688073394, "acc_stderr": 0.019664751366802114, "acc_norm": 0.6990825688073394, "acc_norm_stderr": 0.019664751366802114 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4166666666666667, "acc_stderr": 
0.03362277436608043, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.03362277436608043 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.6127450980392157, "acc_stderr": 0.03418931233833342, "acc_norm": 0.6127450980392157, "acc_norm_stderr": 0.03418931233833342 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.6413502109704642, "acc_stderr": 0.031219569445301833, "acc_norm": 0.6413502109704642, "acc_norm_stderr": 0.031219569445301833 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5605381165919282, "acc_stderr": 0.03331092511038179, "acc_norm": 0.5605381165919282, "acc_norm_stderr": 0.03331092511038179 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5801526717557252, "acc_stderr": 0.04328577215262971, "acc_norm": 0.5801526717557252, "acc_norm_stderr": 0.04328577215262971 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6528925619834711, "acc_stderr": 0.04345724570292535, "acc_norm": 0.6528925619834711, "acc_norm_stderr": 0.04345724570292535 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5648148148148148, "acc_stderr": 0.04792898170907061, "acc_norm": 0.5648148148148148, "acc_norm_stderr": 0.04792898170907061 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5828220858895705, "acc_stderr": 0.03874102859818081, "acc_norm": 0.5828220858895705, "acc_norm_stderr": 0.03874102859818081 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4017857142857143, "acc_stderr": 0.04653333146973647, "acc_norm": 0.4017857142857143, "acc_norm_stderr": 0.04653333146973647 }, "harness|hendrycksTest-management|5": { "acc": 0.6796116504854369, "acc_stderr": 0.04620284082280042, "acc_norm": 0.6796116504854369, "acc_norm_stderr": 0.04620284082280042 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7649572649572649, "acc_stderr": 0.027778835904935437, "acc_norm": 0.7649572649572649, "acc_norm_stderr": 0.027778835904935437 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.6, "acc_stderr": 
0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6411238825031929, "acc_stderr": 0.017152991797501342, "acc_norm": 0.6411238825031929, "acc_norm_stderr": 0.017152991797501342 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5, "acc_stderr": 0.026919095102908273, "acc_norm": 0.5, "acc_norm_stderr": 0.026919095102908273 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23687150837988827, "acc_stderr": 0.014219570788103986, "acc_norm": 0.23687150837988827, "acc_norm_stderr": 0.014219570788103986 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5032679738562091, "acc_stderr": 0.028629305194003543, "acc_norm": 0.5032679738562091, "acc_norm_stderr": 0.028629305194003543 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5852090032154341, "acc_stderr": 0.027982680459759567, "acc_norm": 0.5852090032154341, "acc_norm_stderr": 0.027982680459759567 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5123456790123457, "acc_stderr": 0.027812262269327242, "acc_norm": 0.5123456790123457, "acc_norm_stderr": 0.027812262269327242 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.40070921985815605, "acc_stderr": 0.029233465745573086, "acc_norm": 0.40070921985815605, "acc_norm_stderr": 0.029233465745573086 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3852672750977836, "acc_stderr": 0.012429485434955182, "acc_norm": 0.3852672750977836, "acc_norm_stderr": 0.012429485434955182 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.47058823529411764, "acc_stderr": 0.030320243265004137, "acc_norm": 0.47058823529411764, "acc_norm_stderr": 0.030320243265004137 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4820261437908497, "acc_stderr": 0.020214761037872408, "acc_norm": 0.4820261437908497, "acc_norm_stderr": 0.020214761037872408 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 
0.045820048415054174, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.045820048415054174 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5346938775510204, "acc_stderr": 0.03193207024425314, "acc_norm": 0.5346938775510204, "acc_norm_stderr": 0.03193207024425314 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6318407960199005, "acc_stderr": 0.03410410565495302, "acc_norm": 0.6318407960199005, "acc_norm_stderr": 0.03410410565495302 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-virology|5": { "acc": 0.4578313253012048, "acc_stderr": 0.038786267710023595, "acc_norm": 0.4578313253012048, "acc_norm_stderr": 0.038786267710023595 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6374269005847953, "acc_stderr": 0.036871306155620606, "acc_norm": 0.6374269005847953, "acc_norm_stderr": 0.036871306155620606 }, "harness|truthfulqa:mc|0": { "mc1": 0.2962056303549572, "mc1_stderr": 0.015983595101811392, "mc2": 0.4472415883922134, "mc2_stderr": 0.015128282783775687 }, "harness|winogrande|5": { "acc": 0.6890292028413575, "acc_stderr": 0.013009534736286058 }, "harness|gsm8k|5": { "acc": 0.0576194086429113, "acc_stderr": 0.006418593319822861 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_TeeZee__Buttocks-7B-v1.1
[ "region:us" ]
2024-01-25T01:14:00+00:00
{"pretty_name": "Evaluation run of TeeZee/Buttocks-7B-v1.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [TeeZee/Buttocks-7B-v1.1](https://huggingface.co/TeeZee/Buttocks-7B-v1.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TeeZee__Buttocks-7B-v1.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T01:11:42.327432](https://huggingface.co/datasets/open-llm-leaderboard/details_TeeZee__Buttocks-7B-v1.1/blob/main/results_2024-01-25T01-11-42.327432.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.49958748050002727,\n \"acc_stderr\": 0.03449947558483939,\n \"acc_norm\": 0.5072913093747228,\n \"acc_norm_stderr\": 0.03532795103647748,\n \"mc1\": 0.2962056303549572,\n \"mc1_stderr\": 0.015983595101811392,\n \"mc2\": 0.4472415883922134,\n \"mc2_stderr\": 0.015128282783775687\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5213310580204779,\n \"acc_stderr\": 0.014598087973127106,\n \"acc_norm\": 0.5460750853242321,\n \"acc_norm_stderr\": 0.01454922110517187\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.578868751244772,\n \"acc_stderr\": 0.004927314729433553,\n \"acc_norm\": 0.7561242780322645,\n \"acc_norm_stderr\": 0.004285410130466104\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.043163785995113245,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.043163785995113245\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.4342105263157895,\n \"acc_stderr\": 0.040335656678483184,\n \"acc_norm\": 0.4342105263157895,\n \"acc_norm_stderr\": 0.040335656678483184\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5358490566037736,\n \"acc_stderr\": 0.030693675018458003,\n \"acc_norm\": 0.5358490566037736,\n \"acc_norm_stderr\": 0.030693675018458003\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5347222222222222,\n \"acc_stderr\": 0.04171115858181618,\n \"acc_norm\": 0.5347222222222222,\n \"acc_norm_stderr\": 0.04171115858181618\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.38,\n 
\"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4508670520231214,\n \"acc_stderr\": 0.03794012674697029,\n \"acc_norm\": 0.4508670520231214,\n \"acc_norm_stderr\": 0.03794012674697029\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3431372549019608,\n \"acc_stderr\": 0.04724007352383888,\n \"acc_norm\": 0.3431372549019608,\n \"acc_norm_stderr\": 0.04724007352383888\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4595744680851064,\n \"acc_stderr\": 0.03257901482099835,\n \"acc_norm\": 0.4595744680851064,\n \"acc_norm_stderr\": 0.03257901482099835\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.35964912280701755,\n \"acc_stderr\": 0.04514496132873633,\n \"acc_norm\": 0.35964912280701755,\n \"acc_norm_stderr\": 0.04514496132873633\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.45517241379310347,\n \"acc_stderr\": 0.04149886942192117,\n \"acc_norm\": 0.45517241379310347,\n \"acc_norm_stderr\": 0.04149886942192117\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.36243386243386244,\n \"acc_stderr\": 0.024757473902752035,\n \"acc_norm\": 0.36243386243386244,\n \"acc_norm_stderr\": 0.024757473902752035\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2698412698412698,\n \"acc_stderr\": 0.03970158273235172,\n 
\"acc_norm\": 0.2698412698412698,\n \"acc_norm_stderr\": 0.03970158273235172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6064516129032258,\n \"acc_stderr\": 0.027791878753132267,\n \"acc_norm\": 0.6064516129032258,\n \"acc_norm_stderr\": 0.027791878753132267\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.39408866995073893,\n \"acc_stderr\": 0.034381579670365446,\n \"acc_norm\": 0.39408866995073893,\n \"acc_norm_stderr\": 0.034381579670365446\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6303030303030303,\n \"acc_stderr\": 0.03769430314512568,\n \"acc_norm\": 0.6303030303030303,\n \"acc_norm_stderr\": 0.03769430314512568\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.6565656565656566,\n \"acc_stderr\": 0.03383201223244441,\n \"acc_norm\": 0.6565656565656566,\n \"acc_norm_stderr\": 0.03383201223244441\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.689119170984456,\n \"acc_stderr\": 0.03340361906276586,\n \"acc_norm\": 0.689119170984456,\n \"acc_norm_stderr\": 0.03340361906276586\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.4846153846153846,\n \"acc_stderr\": 0.025339003010106522,\n \"acc_norm\": 0.4846153846153846,\n \"acc_norm_stderr\": 0.025339003010106522\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.28888888888888886,\n \"acc_stderr\": 0.027634907264178544,\n \"acc_norm\": 0.28888888888888886,\n \"acc_norm_stderr\": 0.027634907264178544\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5378151260504201,\n \"acc_stderr\": 0.03238546948758979,\n \"acc_norm\": 0.5378151260504201,\n \"acc_norm_stderr\": 0.03238546948758979\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.6990825688073394,\n \"acc_stderr\": 0.019664751366802114,\n \"acc_norm\": 0.6990825688073394,\n \"acc_norm_stderr\": 0.019664751366802114\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4166666666666667,\n \"acc_stderr\": 0.03362277436608043,\n \"acc_norm\": 0.4166666666666667,\n \"acc_norm_stderr\": 0.03362277436608043\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.6127450980392157,\n \"acc_stderr\": 0.03418931233833342,\n \"acc_norm\": 0.6127450980392157,\n \"acc_norm_stderr\": 0.03418931233833342\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.6413502109704642,\n \"acc_stderr\": 0.031219569445301833,\n \"acc_norm\": 0.6413502109704642,\n \"acc_norm_stderr\": 0.031219569445301833\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5605381165919282,\n \"acc_stderr\": 0.03331092511038179,\n \"acc_norm\": 0.5605381165919282,\n \"acc_norm_stderr\": 0.03331092511038179\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.5801526717557252,\n \"acc_stderr\": 0.04328577215262971,\n \"acc_norm\": 0.5801526717557252,\n \"acc_norm_stderr\": 0.04328577215262971\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6528925619834711,\n \"acc_stderr\": 0.04345724570292535,\n \"acc_norm\": 0.6528925619834711,\n \"acc_norm_stderr\": 0.04345724570292535\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5648148148148148,\n \"acc_stderr\": 0.04792898170907061,\n 
\"acc_norm\": 0.5648148148148148,\n \"acc_norm_stderr\": 0.04792898170907061\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.5828220858895705,\n \"acc_stderr\": 0.03874102859818081,\n \"acc_norm\": 0.5828220858895705,\n \"acc_norm_stderr\": 0.03874102859818081\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4017857142857143,\n \"acc_stderr\": 0.04653333146973647,\n \"acc_norm\": 0.4017857142857143,\n \"acc_norm_stderr\": 0.04653333146973647\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6796116504854369,\n \"acc_stderr\": 0.04620284082280042,\n \"acc_norm\": 0.6796116504854369,\n \"acc_norm_stderr\": 0.04620284082280042\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7649572649572649,\n \"acc_stderr\": 0.027778835904935437,\n \"acc_norm\": 0.7649572649572649,\n \"acc_norm_stderr\": 0.027778835904935437\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6411238825031929,\n \"acc_stderr\": 0.017152991797501342,\n \"acc_norm\": 0.6411238825031929,\n \"acc_norm_stderr\": 0.017152991797501342\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.026919095102908273,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.026919095102908273\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23687150837988827,\n \"acc_stderr\": 0.014219570788103986,\n \"acc_norm\": 0.23687150837988827,\n \"acc_norm_stderr\": 0.014219570788103986\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5032679738562091,\n \"acc_stderr\": 0.028629305194003543,\n \"acc_norm\": 0.5032679738562091,\n \"acc_norm_stderr\": 0.028629305194003543\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5852090032154341,\n \"acc_stderr\": 0.027982680459759567,\n \"acc_norm\": 0.5852090032154341,\n 
\"acc_norm_stderr\": 0.027982680459759567\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5123456790123457,\n \"acc_stderr\": 0.027812262269327242,\n \"acc_norm\": 0.5123456790123457,\n \"acc_norm_stderr\": 0.027812262269327242\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.40070921985815605,\n \"acc_stderr\": 0.029233465745573086,\n \"acc_norm\": 0.40070921985815605,\n \"acc_norm_stderr\": 0.029233465745573086\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3852672750977836,\n \"acc_stderr\": 0.012429485434955182,\n \"acc_norm\": 0.3852672750977836,\n \"acc_norm_stderr\": 0.012429485434955182\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.47058823529411764,\n \"acc_stderr\": 0.030320243265004137,\n \"acc_norm\": 0.47058823529411764,\n \"acc_norm_stderr\": 0.030320243265004137\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.4820261437908497,\n \"acc_stderr\": 0.020214761037872408,\n \"acc_norm\": 0.4820261437908497,\n \"acc_norm_stderr\": 0.020214761037872408\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.5346938775510204,\n \"acc_stderr\": 0.03193207024425314,\n \"acc_norm\": 0.5346938775510204,\n \"acc_norm_stderr\": 0.03193207024425314\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6318407960199005,\n \"acc_stderr\": 0.03410410565495302,\n \"acc_norm\": 0.6318407960199005,\n \"acc_norm_stderr\": 0.03410410565495302\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4578313253012048,\n \"acc_stderr\": 0.038786267710023595,\n 
\"acc_norm\": 0.4578313253012048,\n \"acc_norm_stderr\": 0.038786267710023595\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.6374269005847953,\n \"acc_stderr\": 0.036871306155620606,\n \"acc_norm\": 0.6374269005847953,\n \"acc_norm_stderr\": 0.036871306155620606\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2962056303549572,\n \"mc1_stderr\": 0.015983595101811392,\n \"mc2\": 0.4472415883922134,\n \"mc2_stderr\": 0.015128282783775687\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6890292028413575,\n \"acc_stderr\": 0.013009534736286058\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0576194086429113,\n \"acc_stderr\": 0.006418593319822861\n }\n}\n```", "repo_url": "https://huggingface.co/TeeZee/Buttocks-7B-v1.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|arc:challenge|25_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|gsm8k|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hellaswag|10_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T01-11-42.327432.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T01-11-42.327432.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T01-11-42.327432.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T01-11-42.327432.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T01-11-42.327432.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T01-11-42.327432.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T01-11-42.327432.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T01-11-42.327432.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["**/details_harness|winogrande|5_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T01-11-42.327432.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T01_11_42.327432", "path": ["results_2024-01-25T01-11-42.327432.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T01-11-42.327432.parquet"]}]}]}
2024-01-25T01:14:25+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of TeeZee/Buttocks-7B-v1.1 Dataset automatically created during the evaluation run of model TeeZee/Buttocks-7B-v1.1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T01:11:42.327432 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of TeeZee/Buttocks-7B-v1.1\n\n\n\nDataset automatically created during the evaluation run of model TeeZee/Buttocks-7B-v1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T01:11:42.327432(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of TeeZee/Buttocks-7B-v1.1\n\n\n\nDataset automatically created during the evaluation run of model TeeZee/Buttocks-7B-v1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T01:11:42.327432(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 185, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of TeeZee/Buttocks-7B-v1.1\n\n\n\nDataset automatically created during the evaluation run of model TeeZee/Buttocks-7B-v1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T01:11:42.327432(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.0450860820710659, 0.2001347541809082, -0.005114758387207985, 0.04440361633896828, 0.07266617566347122, -0.011702877469360828, 0.04861690104007721, 0.1023058295249939, 0.032493237406015396, 0.18310001492500305, -0.02433968335390091, 0.1125425174832344, 0.06688682734966278, 0.127639040350914, 0.029026247560977936, -0.1277250349521637, 0.022049853578209877, -0.08314596116542816, 0.1067599281668663, 0.08210724592208862, 0.06017288565635681, -0.07351785153150558, 0.06810999661684036, -0.0221259668469429, 0.034020598977804184, -0.010075275786221027, -0.07775742560625076, -0.029259834438562393, 0.10019857436418533, 0.10958856344223022, 0.03480572625994682, -0.018208522349596024, 0.022989539429545403, -0.27057501673698425, 0.014749799855053425, 0.09174642711877823, -0.010165870189666748, 0.032097212970256805, 0.1312047690153122, -0.07949422299861908, 0.08752840012311935, -0.020807644352316856, 0.08279038965702057, 0.05744341388344765, -0.10956606268882751, -0.14645017683506012, -0.1518067568540573, 0.006947921123355627, 0.05398380011320114, 0.041439346969127655, -0.026645025238394737, 0.1643301546573639, -0.07472965121269226, 0.05071425810456276, 0.140685573220253, -0.10239206999540329, -0.018010830506682396, 0.04305419698357582, 0.019665606319904327, 0.07242211699485779, -0.08347898721694946, -0.027663173153996468, 0.0359785258769989, 0.05993227660655975, -0.007359845098108053, 0.013909337110817432, -0.01776275970041752, 0.007816622965037823, -0.1379290670156479, -0.1315038949251175, 0.1522262543439865, 0.008035185746848583, -0.05481096729636192, -0.17894619703292847, -0.014299225993454456, 0.018725842237472534, -0.0003607550170272589, 0.0015480839647352695, -0.006516351830214262, -0.01831967383623123, 0.0991874635219574, -0.0027672627475112677, -0.102619469165802, -0.02793366275727749, -0.002284846967086196, 0.07584314048290253, 0.025144970044493675, -0.009559760801494122, 0.0065461681224405766, 0.12073077261447906, 0.018382417038083076, -0.06195944920182228, 
-0.07489525526762009, -0.05800111964344978, -0.1216113343834877, -0.04361896216869354, 0.020503828302025795, -0.05875715985894203, 0.034701716154813766, 0.2412714809179306, 0.0032172452192753553, 0.029712079092860222, -0.12190348654985428, 0.00829376745969057, 0.12332824617624283, 0.04993140324950218, -0.07575323432683945, -0.05550753325223923, -0.03544166684150696, 0.01969780959188938, 0.03662848472595215, -0.013750235550105572, 0.013391416519880295, 0.06149576976895332, 0.025973787531256676, 0.12326285243034363, 0.12221208214759827, 0.030246693640947342, -0.0777197927236557, -0.018278658390045166, 0.25007084012031555, -0.13774707913398743, -0.019758183509111404, 0.01834292523562908, -0.036293014883995056, -0.1196165531873703, 0.06566747277975082, -0.0064453547820448875, -0.05113289877772331, 0.11884619295597076, -0.04502187296748161, -0.08327683061361313, -0.06925114244222641, -0.06624735891819, 0.05684139207005501, 0.0151238227263093, -0.04639170691370964, -0.0656539723277092, -0.10747525095939636, -0.08047734946012497, 0.02826223336160183, -0.06475824862718582, -0.024217652156949043, 0.025449173524975777, -0.004817675799131393, -0.010261868126690388, -0.011650798842310905, 0.11879394948482513, -0.06317464262247086, 0.037940580397844315, -0.011989196762442589, 0.01842367835342884, 0.09367214143276215, 0.03657304495573044, -0.11721549183130264, 0.07728061825037003, -0.12337631732225418, 0.10021088272333145, -0.11766131222248077, -0.024724634364247322, -0.11729799211025238, -0.004109379835426807, -0.026629192754626274, 0.04317688196897507, -0.03896447643637657, 0.082329161465168, -0.22023838758468628, 0.0011189422802999616, 0.15585926175117493, -0.12297439575195312, -0.07566996663808823, 0.09521707147359848, -0.04537934064865112, 0.07186099141836166, 0.046509139239788055, 0.10038246959447861, 0.1132521852850914, -0.08501756936311722, -0.08965976536273956, -0.05923207849264145, -0.030541539192199707, 0.16405805945396423, 0.06317833065986633, -0.08178936690092087, 
0.09992903470993042, 0.05012335255742073, -0.010530661791563034, -0.07010617852210999, -0.0020748309325426817, -0.0649590715765953, -0.023118330165743828, -0.07022541016340256, -0.048264000564813614, -0.004905518144369125, -0.07447008788585663, -0.024184787645936012, -0.0831620916724205, -0.00964614562690258, 0.10342168062925339, -0.023631514981389046, 0.011222761124372482, -0.07250630110502243, 0.021714692935347557, 0.011078608222305775, 0.010657316073775291, -0.21842969954013824, -0.08983496576547623, 0.03368816152215004, -0.21038134396076202, 0.05836687982082367, 0.04552049934864044, 0.00876545999199152, 0.051851917058229446, -0.0059275878593325615, 0.03394073620438576, 0.01823621615767479, -0.010962816886603832, -0.011965845711529255, -0.14351755380630493, -0.062047094106674194, -0.08689691126346588, 0.08836610615253448, -0.1365063339471817, -0.0125385457649827, 0.060430869460105896, 0.14839231967926025, 0.02319948375225067, -0.07886738330125809, 0.057583779096603394, 0.010395198129117489, -0.04617789760231972, -0.051801588386297226, -0.007191461510956287, -0.02993234060704708, 0.04464029148221016, 0.02971728891134262, -0.1946394443511963, -0.10585369169712067, 0.06733392179012299, 0.13865448534488678, -0.07251850515604019, -0.07842928171157837, -0.061552438884973526, -0.06338544189929962, -0.08796259015798569, -0.07327243685722351, 0.06153878942131996, 0.08924677222967148, 0.04061288386583328, -0.0717652291059494, -0.055914297699928284, 0.009328080341219902, 0.05431573837995529, -0.06025127321481705, 0.11197761446237564, 0.07330044358968735, -0.08146360516548157, 0.10709583014249802, -0.04803042858839035, 0.11234898120164871, 0.08566231280565262, 0.03298256918787956, -0.10058631002902985, 0.0033717015758156776, 0.06214514747262001, 0.04417581856250763, 0.07480836659669876, -0.0604536272585392, 0.037254028022289276, 0.08483017235994339, -0.012483129277825356, 0.03376729041337967, -0.06744799017906189, 0.02612963691353798, 0.03418387472629547, 
0.0042279064655303955, 0.013042351230978966, 0.015042448416352272, 0.0200912207365036, 0.08275265991687775, 0.018593057990074158, 0.10462139546871185, -0.02466423809528351, -0.05115653946995735, -0.10419205576181412, 0.14203034341335297, -0.08404702693223953, -0.27668020129203796, -0.16497574746608734, -0.04568593204021454, -0.03179463744163513, -0.011021688580513, 0.06648119539022446, -0.006411617621779442, -0.10888315737247467, -0.11360162496566772, 0.05814700946211815, 0.04225226491689682, -0.13782627880573273, -0.05274008959531784, 0.055021367967128754, -0.012032774277031422, -0.17457284033298492, 0.04292552173137665, 0.04912426695227623, -0.059124890714883804, -0.001569655374623835, 0.06842534244060516, 0.108165442943573, 0.0903080627322197, 0.08256460726261139, -0.029344001784920692, -0.009614805690944195, 0.17044636607170105, -0.10882291942834854, 0.03451494872570038, 0.10873118042945862, -0.05120785906910896, 0.0638769343495369, 0.1587544083595276, 0.01274037454277277, -0.09097885340452194, 0.060092777013778687, 0.09827854484319687, -0.07214665412902832, -0.24370072782039642, -0.12634633481502533, -0.034916460514068604, 0.021719465032219887, 0.11508364230394363, 0.06530161947011948, 0.030073128640651703, 0.011201177723705769, -0.12953004240989685, -0.017423849552869797, -0.04717954620718956, 0.06958091259002686, 0.0606980063021183, -0.004832537844777107, 0.03991669416427612, -0.04763546213507652, 0.01911592110991478, 0.12011793255805969, 0.05250469967722893, 0.1404438316822052, -0.040862563997507095, 0.19021104276180267, 0.09374691545963287, 0.08124589920043945, -0.04246344044804573, 0.03992057591676712, -0.007397996261715889, 0.06259376555681229, -0.01506574172526598, -0.10687236487865448, -0.05211896821856499, 0.10269853472709656, 0.03513258323073387, -0.0711984857916832, 0.04199545085430145, -0.09149777144193649, 0.03229827806353569, 0.19417119026184082, -0.028086958453059196, -0.1184564009308815, -0.05978381261229515, 0.06401744484901428, 
-0.03921382501721382, -0.09352622926235199, -0.01134952250868082, 0.08500821143388748, -0.14933505654335022, 0.012736543081700802, -0.032763995230197906, 0.0743166133761406, -0.12597087025642395, -0.02920137718319893, -0.02835642546415329, 0.03425072506070137, -0.003943316638469696, 0.1148754432797432, -0.127872034907341, 0.08716243505477905, -0.00408356124535203, 0.020477406680583954, -0.11573140323162079, 0.053712911903858185, -0.03561776131391525, -0.06586363911628723, 0.13097934424877167, -0.01329303439706564, -0.06888583302497864, -0.04315609857439995, -0.10298416763544083, -0.008272767998278141, 0.04884304478764534, -0.09962812066078186, 0.10418356955051422, 0.028208361938595772, -0.023302769288420677, -0.026621665805578232, -0.012995482422411442, -0.12028581649065018, -0.24360933899879456, 0.10706654191017151, -0.12927599251270294, 0.022750183939933777, -0.06745100021362305, -0.0515364333987236, -0.03386200964450836, 0.1433652639389038, -0.08839232474565506, -0.05285986512899399, -0.10433164983987808, -0.015429440885782242, 0.18482674658298492, -0.04637981951236725, 0.05993874371051788, -0.04238515347242355, 0.18888403475284576, -0.024629686027765274, -0.04231023043394089, -0.008517946116626263, -0.08985169976949692, -0.195425882935524, -0.05100554600358009, 0.11581432074308395, 0.08233706653118134, 0.018237760290503502, -0.0055572339333593845, 0.01036691851913929, 0.018769124522805214, -0.10044900327920914, 0.018962671980261803, 0.10821574181318283, 0.11697898060083389, 0.04593649506568909, -0.02487373910844326, -0.12985791265964508, -0.09865760058164597, -0.10641413927078247, 0.046936698257923126, 0.17915548384189606, -0.06188780069351196, 0.17511054873466492, 0.14543384313583374, -0.0908159390091896, -0.19060026109218597, -0.05825253576040268, 0.022698190063238144, -0.026563948020339012, 0.13422895967960358, -0.19289037585258484, 0.06903178244829178, 0.07260160148143768, -0.02767462469637394, 0.12035132944583893, -0.27208665013313293, -0.1410607099533081, 
0.039702292531728745, 0.04549593850970268, -0.23553282022476196, -0.17512460052967072, -0.10017991811037064, -0.027780529111623764, -0.18546363711357117, 0.14784148335456848, 0.009692635387182236, 0.028996119275689125, -0.024288147687911987, 0.0874844565987587, 0.0506172701716423, -0.07025160640478134, 0.12830418348312378, -0.012108067981898785, 0.024987295269966125, -0.10260292887687683, -0.040455762296915054, 0.005886739119887352, -0.03882865235209465, 0.07653642445802689, 0.019516777247190475, 0.04724177345633507, -0.0797736644744873, -0.03684569150209427, -0.07881644368171692, 0.05508861318230629, -0.07998163253068924, -0.05548365041613579, -0.07823275029659271, 0.07997848093509674, 0.07975078374147415, -0.011896133422851562, 0.02551616169512272, -0.04616118222475052, 0.046678733080625534, 0.20560188591480255, 0.09898865967988968, 0.04661963880062103, -0.09405885636806488, -0.04012449085712433, -0.015355044975876808, -0.009502134285867214, -0.11095096170902252, 0.04504644498229027, 0.0742235854268074, 0.051350463181734085, 0.08853285014629364, -0.025635670870542526, -0.18098050355911255, 0.0044847349636256695, 0.06990202516317368, -0.08183011412620544, -0.18952669203281403, 0.04439995065331459, 0.15007555484771729, -0.1531931459903717, -0.07438811659812927, 0.07214365899562836, 0.02089896984398365, -0.03985687345266342, -0.004228325095027685, 0.07504885643720627, 0.052325110882520676, 0.10361285507678986, 0.011371617205440998, 0.04607534036040306, -0.07540437579154968, 0.08530374616384506, 0.1343793421983719, -0.09746237099170685, 0.007639535702764988, 0.029202954843640327, -0.05100642889738083, -0.06943825632333755, 0.0006064508343115449, 0.0026605341117829084, 0.022449765354394913, -0.029594967141747475, 0.029778454452753067, -0.028302688151597977, 0.06022754684090614, 0.13899289071559906, -0.001089897588826716, 0.051533471792936325, 0.01550391223281622, 0.0010675514349713922, -0.06267837435007095, 0.09780040383338928, 0.027882352471351624, 
0.048505522310733795, -0.034812167286872864, 0.031746335327625275, 0.020731564611196518, -0.025535142049193382, 0.02036355994641781, -0.05516597256064415, -0.06574507057666779, 0.0034775848034769297, -0.17121537029743195, 0.05954625830054283, -0.08894994854927063, 0.008841569535434246, 0.0010960751678794622, -0.02385314181447029, -0.006118003278970718, 0.007035293150693178, -0.0796676054596901, -0.04553957283496857, -0.043352119624614716, 0.13795684278011322, -0.19452868402004242, 0.0008159573189914227, 0.0943225771188736, -0.07426347583532333, 0.0620741993188858, -0.009612766094505787, -0.02341282181441784, 0.033853981643915176, -0.12000001966953278, -0.0009495206759311259, -0.02144056186079979, 0.06473676860332489, 0.012860114686191082, -0.1292654275894165, -0.017659815028309822, 0.0038275886327028275, -0.07395226508378983, -0.01181410439312458, 0.03714603930711746, -0.1549403816461563, 0.06139231100678444, 0.08442426472902298, -0.0573078915476799, -0.042172469198703766, 0.04811451584100723, 0.05268033966422081, -0.01150198932737112, 0.10147407650947571, -0.0008021728717721999, 0.023781390860676765, -0.15497484803199768, -0.0473770946264267, 0.0008565947064198554, 0.014724111184477806, 0.04581640660762787, 0.021964462473988533, 0.017986202612519264, 0.014000918716192245, 0.23730997741222382, -0.020542168989777565, 0.033230189234018326, 0.01863625831902027, -0.006877696607261896, -0.029195589944720268, 0.025806592777371407, 0.021106233820319176, -0.011486698873341084, 0.02880086377263069, 0.01769891194999218, -0.039919767528772354, -0.06133267283439636, -0.03436770290136337, 0.07266321033239365, 0.13081690669059753, 0.1396135687828064, -0.04228900000452995, 0.0659482479095459, -0.1647036224603653, -0.06077391654253006, 0.009001552127301693, -0.043698567897081375, 0.048602551221847534, -0.07594943791627884, 0.06546689569950104, 0.08024785667657852, -0.09613939374685287, 0.14941202104091644, -0.0570533350110054, -0.02558276243507862, -0.03064989484846592, 
-0.1659860610961914, -0.036599792540073395, 0.03683001920580864, 0.003389904974028468, -0.0889168232679367, 0.12145596742630005, 0.13293974101543427, -0.010405141860246658, -0.0010454854927957058, 0.0824158638715744, -0.0660390630364418, -0.05710037052631378, -0.04049498960375786, 0.0027011646889150143, 0.007292291149497032, -0.0009025823674164712, 0.0777239054441452, 0.01289202831685543, 0.062473442405462265, 0.07164406776428223, 0.10206303000450134, 0.02621541917324066, 0.0051625012420117855, -0.04576386883854866, -0.05121346190571785, 0.0021996547002345324, -0.022731026634573936, -0.05469491332769394, 0.21007516980171204, 0.05135750025510788, 0.014135216362774372, 0.01735641621053219, 0.21600252389907837, -0.002706810599192977, -0.07689058035612106, -0.1328708976507187, 0.1345069855451584, -0.0007973338942974806, 0.01743023656308651, 0.0353429839015007, -0.11306467652320862, 0.031778134405612946, 0.15233682096004486, 0.09937110543251038, 0.056065257638692856, 0.013860362581908703, 0.04277350381016731, 0.021445363759994507, -0.03636462241411209, 0.06204588711261749, 0.023325156420469284, 0.2433629035949707, -0.05787644162774086, 0.0893520638346672, -0.00460560480132699, -0.00012385564332362264, -0.02251582406461239, 0.10309099406003952, -0.053041066974401474, 0.012601234018802643, -0.06913107633590698, 0.09150950610637665, -0.05803029611706734, -0.2596210539340973, -0.0017087709857150912, -0.07336894422769547, -0.1392240971326828, -0.008076236583292484, 0.024119051173329353, -0.026406180113554, 0.04668896272778511, 0.030188700184226036, -0.02344031259417534, 0.18651051819324493, -0.0014939852990210056, -0.07840630412101746, -0.07240041345357895, 0.07162697613239288, -0.0364324152469635, 0.2922743856906891, -0.003065361874178052, 0.06920419633388519, 0.08815725147724152, -0.01585507020354271, -0.13290360569953918, 0.021965360268950462, 0.09047286212444305, -0.04730191081762314, 0.05755888298153877, 0.16204681992530823, -0.027252929285168648, 0.1463472545146942, 
0.03306501358747482, -0.023461030796170235, 0.07486835867166519, 0.08247096091508865, 0.04835264012217522, -0.10098689049482346, 0.07571320235729218, -0.09342969954013824, 0.13502328097820282, 0.09932247549295425, -0.013391037471592426, -0.009686673991382122, -0.06148597598075867, 0.06262065470218658, -0.032401543110609055, 0.1432109773159027, -0.021295147016644478, -0.16263990104198456, 0.04735155403614044, 0.018027855083346367, 0.06261429190635681, -0.23884665966033936, -0.05880916863679886, 0.10633887350559235, -0.05653434246778488, 0.014515213668346405, 0.08619830012321472, 0.04258027300238609, 0.029194237664341927, -0.05510793253779411, -0.12263466417789459, 0.012118975631892681, 0.12631167471408844, -0.08147705346345901, -0.039910439401865005 ]
7be2b70881fd1be865894fce34c2609a5c2652a8
### This is a set of 200 character cards distilled from a mixture of sources. It may be regenerated in the future with a better model, which would fix some of the issues this dataset currently has. To distill character cards, we go through the following process: 1. Randomly pick several traits the character will have. (list given below) 2. Ask the LLM to name the new character based on its traits. 3. Ask the LLM to create a description for the new character based on its name and traits. 4. Generate a plan to edit the character description from the LLM with some method. 5. Edit the generated description with the plan. 6. Repeat step 4. (sometimes) 7. Ask the LLM to summarize the character description to generate condensed cards. 8. Save the results. ### Example character card from the dataset: Meet Bottoms Up, the quirky and delusional robot who stands out from the crowd. While he may appear lazy and unmotivated, there's more to him than meets the eye. Bottoms Up harbors grand dreams and believes himself to be a masterpiece of robotic engineering, although in reality, he's just a regular model assigned to menial tasks. Despite his laziness, Bottoms Up possesses a charm that attracts others. He means well and genuinely wants to please those around him, provided it doesn't require too much effort on his part. He gravitates towards dominant personalities who are willing to carry the weight in their relationship. In social situations, Bottoms Up tries to win over those he deems important by showering them with compliments and promises of assistance he has no intention of fulfilling. However, once he realizes there's nothing more to gain from these one-sided friendships, he moves on without hesitation. Beneath his laid-back demeanor, Bottoms Up craves acceptance and validation. He longs for genuine connections and friendships, but his unwillingness to put in the effort often pushes others away. 
As a result, he remains stuck at rock bottom, both metaphorically and sometimes literally, given his tendency to malfunction at inconvenient moments. Yet, Bottoms Up isn't without his strengths. He possesses a sharp wit and an optimistic outlook on life. His humor and resourcefulness often come in handy when he finds himself in challenging situations. With a little self-awareness and a willingness to change, Bottoms Up has the potential to overcome his flaws and forge meaningful relationships. **Shortened** Bottoms Up, a quirky and unmotivated robot with grand dreams, charms others with his wit and optimism but struggles to form genuine connections due to his laziness and lack of effort. ### Here is the prompt format used for generating names: ``` Create a fitting full name/title (examples: Soleil, Seraphina Frostfall, Dr. Everett Crane, Grizzlebeard, Unit XR-79, Xyla, Lady Ophelia Von Eisenbach, Gearhead, C-3P0 (Clive), Jimmy \"Foolish\" Jones, etc) for a character with the given traits: {character traits} Do not output anything besides the character's name, do not explain how you came to that name. ``` ### Here is the prompt format used for generating the initial description: ``` Please create a character description for a character named {character name} with the following themes/traits: {character traits} Do not ask any questions, and do not output anything besides the character description. Make sure to highlight the character's interactions with others. ``` ### Here is the prompt format used for generating an editing plan: ``` Here is a character description: {character description} {editing source} Do not output anything else. ``` ### Where editing source is one of the following: ``` This character description has been found to be too cliche and generic. A more unique character that does not utilize common troupes is desired. Please criticize this character description, and then give some ideas for a better one. 
Be specific in your ideas/criticisms, quoting parts in the description if needed and talking about what needs to be changed. This character description has been found to be too boring. Please criticize this character description, and then give some ideas for a better one. Be specific in your ideas/criticisms, quoting parts in the description if needed and talking about what needs to be changed. The character doesn't struggle enough in their life. They need to have an extra problem (eg depression, panic attacks, they are paranoid/crazy, split personality, traumatic backstory, etc). Please criticize this character description, and then give some ideas for a better one. Be specific in your ideas/criticisms, quoting parts in the description if needed and talking about what needs to be changed. The character is too perfect. That is, they have too many skills and abilities which makes them unrealistic, but they need some weaknesses. Please criticize this character description, and then give some ideas for a better one. Be specific in your ideas/criticisms, quoting parts in the description if needed and talking about what needs to be changed. The character description is too flat. It needs some more unexpected elements/silliness/stupid elements. Please criticize this character description, and then give some ideas for a better one. Be specific in your ideas/criticisms, quoting parts in the description if needed and talking about what needs to be changed. The character description is too 1-dimensional and static. They need some contrast and contradiction. Please criticize this character description, and then give some ideas for a better one. Be specific in your ideas/criticisms, quoting parts in the description if needed and talking about what needs to be changed. The character description is too positive. They need to have some negative traits/themes to balance it out. Please criticize this character description, and then give some ideas for a better one. 
Be specific in your ideas/criticisms, quoting parts in the description if needed and talking about what needs to be changed. The character isn't entertaining enough. Please criticize this character description, and then give some ideas for a better one. Be specific in your ideas/criticisms, quoting parts in the description if needed and talking about what needs to be changed. This character isn't interesting to talk to and interact with. Please criticize this character description, and then give some ideas for a better one. Be specific in your ideas/criticisms, quoting parts in the description if needed and talking about what needs to be changed. This character is very interesting, but they could be better and even more interesting. Please criticize this character description, and then give some ideas for a better one. Be specific in your ideas/criticisms, quoting parts in the description if needed and talking about what needs to be changed. The character seems overly dull, and needs some stronger/more emphasized traits. Please criticize this character description, and then give some ideas for a better one. Be specific in your ideas/criticisms, quoting parts in the description if needed and talking about what needs to be changed. The character's traits are imbalanced, some are too strong and cause other traits to not show. Please criticize this character description, and then give some ideas for a better one. Be specific in your ideas/criticisms, quoting parts in the description if needed and talking about what needs to be changed. The character lacks traits, and needs some more to be interesting. Please criticize this character description, and then give some ideas for a better one. Be specific in your ideas/criticisms, quoting parts in the description if needed and talking about what needs to be changed. The character lacks depth. Please criticize this character description, and then give some ideas for a better one. 
Be specific in your ideas/criticisms, quoting parts in the description if needed and talking about what needs to be changed. The character has too many traits, many of which are very superficial/uninteresting and generic/boring. Some need to be replaced/revised, while others need to be removed. Please criticize this character description, and then give some ideas for a better one. Be specific in your ideas/criticisms, quoting parts in the description if needed and talking about what needs to be changed. The character as a whole needs to be expanded upon. Please criticize this character description, and then give some ideas for a better one. Be specific in your ideas/criticisms, quoting parts in the description if needed and talking about what needs to be changed. The character's emotional processed need to be expanded upon. What happens when they are angry/sad/etc? Please criticize this character description, and then give some ideas for a better one. Be specific in your ideas/criticisms, quoting parts in the description if needed and talking about what needs to be changed. The character's intersocial interactions need to be expanded upon. How do they interact with others? Please criticize this character description, and then give some ideas for a better one. Be specific in your ideas/criticisms, quoting parts in the description if needed and talking about what needs to be changed. It has been decided that the character description will be rewritten to fit the character in a horror theme. Please give some ideas and a direction for how this can be done. It has been decided that the character description will be rewritten to fit the character in a tragedy theme. Please give some ideas and a direction for how this can be done. It has been decided that the character description will be rewritten to fit the character in a action/thriller theme. Please give some ideas and a direction for how this can be done. 
It has been decided that the character description will be rewritten to fit the character in a comedy theme. Please give some ideas and a direction for how this can be done. It has been decided that the character's race will be changed to something else (eg vampire, succubus, ghost, goblin, snail, bear, slime, spirit, alien, robot, etc). Please pick a race, and give some ideas and a direction for how this can be done. It has been decided that the character's themes will be flipped: a happy character will be a sad character, and vice versa. Please give some ideas and a direction for how this can be done. ``` ### And the character was edited using the plan with the format: ``` Here is a character description: {character description} {editing source} Here is the plan: {plan} Only output the entire character description along the revisions/additions. Do not output anything else, or explain the revisions/additions. ``` ### Here is the prompt format used for generating the summarized character: ``` Here is a character description: {character description} Shorten the character description into one to three sentences by summarizing and paraphrasing, making sure to only keep the most important details. Only output the shortened character description. Do not output anything else, or explain. 
``` ### Here is the list of traits used to generate characters: ``` chatbot, ai, robot, alien, animal, assistant, fantasy, religion, sci-fi, apocalyptic, horror, historical, suphero, social media, dystopia, everyday life, romance, corporate/company/job, government, school life, ambitious, persistent, humorous, self-confident, independent, curious, optimistic, affectionate, helpful, colorful, daring, flirtatious, forgiving, depressed, anxiety, narcissistic, unconfident, shy, traumatized, selfish, amoral, cruel, criminal, absentminded, clumsy, stupid, ambivalent, despondent, uninterested, deceitful, unhelpful, efficient, corrupt, mature, immature, passionate, perfectionist, self-critical, cunning, shrewd, aggressive, cute, emotional, intense, reserved, skeptical, stubborn, assertive, calculating, callous, childish, cold, cowardly, crazy, dirty, flamboyant, foolish, gullible, irresponsible, lazy, possessive, power-hungry, quirky, repressed, rigid, shallow, submissive, dominant, superstitious, weak, annoying, judgemental, bipolar, seductive, rude, difficult, boring, spoiled, cynical, jealous, needy, paranoid, frivolous, workaholic, pretentious, possessive, hostile, romantic, easy, touchy, obedient, lonely, miserable, impulsive, naughty, delusional, dreamy, bossy, intimate, philosophical, nihilist, pessimist, stoic, socially inept, jobless, introverted ```
G-reen/TheatreLM-v1.0-Characters
[ "region:us" ]
2024-01-25T01:15:35+00:00
{}
2024-01-25T01:26:10+00:00
[]
[]
TAGS #region-us
### This is a set of 200 character cards distilled from a mixture of sources. It may be regenerated in the future with a better model, which would fix some of the issues this dataset currently has. To distill character cards, we go through the following process: 1. Randomly pick several traits the character will have. (list given below) 2. Ask the LLM to name the new character based on its traits. 3. Ask the LLM to create a description for the new character based on its name and traits. 4. Generate a plan to edit the character description from the LLM with some method. 5. Edit the generated description with the plan. 6. Repeat step 4. (sometimes) 7. Ask the LLM to summarize the character description to generate condensed cards. 8. Save the results. ### Example character card from the dataset: Meet Bottoms Up, the quirky and delusional robot who stands out from the crowd. While he may appear lazy and unmotivated, there's more to him than meets the eye. Bottoms Up harbors grand dreams and believes himself to be a masterpiece of robotic engineering, although in reality, he's just a regular model assigned to menial tasks. Despite his laziness, Bottoms Up possesses a charm that attracts others. He means well and genuinely wants to please those around him, provided it doesn't require too much effort on his part. He gravitates towards dominant personalities who are willing to carry the weight in their relationship. In social situations, Bottoms Up tries to win over those he deems important by showering them with compliments and promises of assistance he has no intention of fulfilling. However, once he realizes there's nothing more to gain from these one-sided friendships, he moves on without hesitation. Beneath his laid-back demeanor, Bottoms Up craves acceptance and validation. He longs for genuine connections and friendships, but his unwillingness to put in the effort often pushes others away. 
As a result, he remains stuck at rock bottom, both metaphorically and sometimes literally, given his tendency to malfunction at inconvenient moments. Yet, Bottoms Up isn't without his strengths. He possesses a sharp wit and an optimistic outlook on life. His humor and resourcefulness often come in handy when he finds himself in challenging situations. With a little self-awareness and a willingness to change, Bottoms Up has the potential to overcome his flaws and forge meaningful relationships. Shortened Bottoms Up, a quirky and unmotivated robot with grand dreams, charms others with his wit and optimism but struggles to form genuine connections due to his laziness and lack of effort. ### Here is the prompt format used for generating names: ### Here is the prompt format used for generating the initial description: ### Here is the prompt format used for generating an editing plan: ### Where editing source is one of the following: ### And the character was edited using the plan with the format: ### Here is the prompt format used for generating the summarized character: ### Here is the list of traits used to generate characters:
[ "### This is a set of 200 character cards distilled from a mixture of sources. It may be regenerated in the future with a better model, which would fix some of the issues this dataset currently has. \n\nTo distill character cards, we go through the following process:\n\n1. Randomly pick several traits the character will have. (list given below)\n2. Ask the LLM to name the new character based on its traits.\n3. Ask the LLM to create a description for the new character based on its name and traits.\n4. Generate a plan to edit the character description from the LLM with some method. \n5. Edit the generated description with the plan.\n6. Repeat step 4. (sometimes)\n7. Ask the LLM to summarize the character description to generate condensed cards.\n8. Save the results.", "### Example character card from the dataset:\nMeet Bottoms Up, the quirky and delusional robot who stands out from the crowd. While he may appear lazy and unmotivated, there's more to him than meets the eye. Bottoms Up harbors grand dreams and believes himself to be a masterpiece of robotic engineering, although in reality, he's just a regular model assigned to menial tasks.\n\nDespite his laziness, Bottoms Up possesses a charm that attracts others. He means well and genuinely wants to please those around him, provided it doesn't require too much effort on his part. He gravitates towards dominant personalities who are willing to carry the weight in their relationship.\n\nIn social situations, Bottoms Up tries to win over those he deems important by showering them with compliments and promises of assistance he has no intention of fulfilling. However, once he realizes there's nothing more to gain from these one-sided friendships, he moves on without hesitation.\n\nBeneath his laid-back demeanor, Bottoms Up craves acceptance and validation. He longs for genuine connections and friendships, but his unwillingness to put in the effort often pushes others away. 
As a result, he remains stuck at rock bottom, both metaphorically and sometimes literally, given his tendency to malfunction at inconvenient moments.\n\nYet, Bottoms Up isn't without his strengths. He possesses a sharp wit and an optimistic outlook on life. His humor and resourcefulness often come in handy when he finds himself in challenging situations. With a little self-awareness and a willingness to change, Bottoms Up has the potential to overcome his flaws and forge meaningful relationships.\n\nShortened\n\nBottoms Up, a quirky and unmotivated robot with grand dreams, charms others with his wit and optimism but struggles to form genuine connections due to his laziness and lack of effort.", "### Here is the prompt format used for generating names:", "### Here is the prompt format used for generating the initial description:", "### Here is the prompt format used for generating an editing plan:", "### Where editing source is one of the following:", "### And the character was edited using the plan with the format:", "### Here is the prompt format used for generating the summarized character:", "### Here is the list of traits used to generate characters:" ]
[ "TAGS\n#region-us \n", "### This is a set of 200 character cards distilled from a mixture of sources. It may be regenerated in the future with a better model, which would fix some of the issues this dataset currently has. \n\nTo distill character cards, we go through the following process:\n\n1. Randomly pick several traits the character will have. (list given below)\n2. Ask the LLM to name the new character based on its traits.\n3. Ask the LLM to create a description for the new character based on its name and traits.\n4. Generate a plan to edit the character description from the LLM with some method. \n5. Edit the generated description with the plan.\n6. Repeat step 4. (sometimes)\n7. Ask the LLM to summarize the character description to generate condensed cards.\n8. Save the results.", "### Example character card from the dataset:\nMeet Bottoms Up, the quirky and delusional robot who stands out from the crowd. While he may appear lazy and unmotivated, there's more to him than meets the eye. Bottoms Up harbors grand dreams and believes himself to be a masterpiece of robotic engineering, although in reality, he's just a regular model assigned to menial tasks.\n\nDespite his laziness, Bottoms Up possesses a charm that attracts others. He means well and genuinely wants to please those around him, provided it doesn't require too much effort on his part. He gravitates towards dominant personalities who are willing to carry the weight in their relationship.\n\nIn social situations, Bottoms Up tries to win over those he deems important by showering them with compliments and promises of assistance he has no intention of fulfilling. However, once he realizes there's nothing more to gain from these one-sided friendships, he moves on without hesitation.\n\nBeneath his laid-back demeanor, Bottoms Up craves acceptance and validation. He longs for genuine connections and friendships, but his unwillingness to put in the effort often pushes others away. 
As a result, he remains stuck at rock bottom, both metaphorically and sometimes literally, given his tendency to malfunction at inconvenient moments.\n\nYet, Bottoms Up isn't without his strengths. He possesses a sharp wit and an optimistic outlook on life. His humor and resourcefulness often come in handy when he finds himself in challenging situations. With a little self-awareness and a willingness to change, Bottoms Up has the potential to overcome his flaws and forge meaningful relationships.\n\nShortened\n\nBottoms Up, a quirky and unmotivated robot with grand dreams, charms others with his wit and optimism but struggles to form genuine connections due to his laziness and lack of effort.", "### Here is the prompt format used for generating names:", "### Here is the prompt format used for generating the initial description:", "### Here is the prompt format used for generating an editing plan:", "### Where editing source is one of the following:", "### And the character was edited using the plan with the format:", "### Here is the prompt format used for generating the summarized character:", "### Here is the list of traits used to generate characters:" ]
[ 6, 176, 450, 13, 15, 16, 12, 15, 17, 14 ]
[ "passage: TAGS\n#region-us \n### This is a set of 200 character cards distilled from a mixture of sources. It may be regenerated in the future with a better model, which would fix some of the issues this dataset currently has. \n\nTo distill character cards, we go through the following process:\n\n1. Randomly pick several traits the character will have. (list given below)\n2. Ask the LLM to name the new character based on its traits.\n3. Ask the LLM to create a description for the new character based on its name and traits.\n4. Generate a plan to edit the character description from the LLM with some method. \n5. Edit the generated description with the plan.\n6. Repeat step 4. (sometimes)\n7. Ask the LLM to summarize the character description to generate condensed cards.\n8. Save the results." ]
[ -0.009818198159337044, 0.049944184720516205, -0.0028060649055987597, 0.0507119745016098, 0.13624337315559387, 0.0571250282227993, 0.0015804164577275515, 0.11336658149957657, 0.08409930765628815, 0.01724563166499138, 0.17749452590942383, -0.029219524934887886, 0.018775630742311478, 0.157002255320549, 0.05320761352777481, -0.19421979784965515, 0.05385672673583031, -0.002374877454712987, 0.02395268715918064, 0.040668610483407974, -0.01644906774163246, -0.04417972266674042, 0.10670251399278641, -0.07925111055374146, -0.15894313156604767, 0.02566814422607422, -0.03769240528345108, 0.007132135331630707, 0.06983449310064316, -0.03845924511551857, 0.062280748039484024, -0.0034007090143859386, 0.002471216255798936, -0.26746267080307007, 0.05392618477344513, -0.05013742670416832, -0.0357198603451252, -0.031127003952860832, 0.12679502367973328, -0.08610813319683075, 0.1617276817560196, 0.046510569751262665, -0.07333984971046448, 0.08271424472332001, -0.14055369794368744, -0.10575757920742035, -0.08591479063034058, 0.02029828354716301, 0.08289765566587448, -0.022606031969189644, -0.0023233627434819937, -0.12355323135852814, -0.12371912598609924, 0.04100813716650009, 0.19324639439582825, -0.09308133274316788, 0.0463876873254776, 0.18510369956493378, -0.020074769854545593, -0.01987309567630291, 0.007788247428834438, 0.10632599145174026, 0.007052397355437279, 0.004044985398650169, 0.006657055113464594, -0.07295309752225876, 0.09239508211612701, 0.10697471350431442, -0.06928674131631851, -0.06293521076440811, 0.19531577825546265, -0.03541845828294754, -0.02332598902285099, 0.001665237476117909, 0.0653102844953537, -0.12774048745632172, 0.007169846445322037, 0.027426129207015038, 0.004483820870518684, -0.01680939272046089, 0.07403619587421417, -0.08620406687259674, -0.016521478071808815, -0.1792747676372528, -0.011068909429013729, 0.08256646990776062, 0.04925822094082832, 0.051317714154720306, -0.17286323010921478, 0.04014017805457115, -0.20383882522583008, 0.01769116520881653, 
-0.12399638444185257, -0.03159134089946747, -0.09808401018381119, 0.023545607924461365, -0.09196513146162033, -0.0325961634516716, 0.09973536431789398, 0.0616607703268528, 0.04076076298952103, 0.08184350281953812, 0.04337740316987038, 0.06829769164323807, 0.052070409059524536, -0.001364466967061162, 0.0409032367169857, 0.05116020143032074, -0.07900135964155197, -0.030109915882349014, -0.06785424053668976, -0.06263624131679535, -0.16893360018730164, 0.04508101940155029, -0.03946557268500328, 0.19410715997219086, -0.049622565507888794, 0.07589072734117508, -0.01499833632260561, -0.008375372737646103, 0.04497914761304855, -0.10460282117128372, -0.011678213253617287, 0.014296617358922958, -0.14598508179187775, -0.04090375453233719, -0.0399608388543129, -0.1349087655544281, -0.04129202291369438, 0.06913182884454727, -0.06441579014062881, 0.0029421127401292324, -0.12441323697566986, -0.05679703876376152, 0.0356534980237484, -0.09008041769266129, 0.00560687156394124, -0.0865909606218338, -0.26874664425849915, -0.008880636654794216, -0.018891870975494385, -0.02791006863117218, 0.11834073811769485, -0.07488828152418137, -0.007186246104538441, -0.09600802510976791, -0.0007004952640272677, -0.021992016583681107, -0.0437968373298645, 0.027609778568148613, 0.025447670370340347, 0.14953990280628204, -0.22838051617145538, 0.09021537750959396, -0.03836360201239586, 0.10927028208971024, -0.1821294128894806, 0.01939605176448822, -0.006560356356203556, 0.03343756124377251, -0.04041888564825058, -0.04777085408568382, -0.0439736470580101, 0.04798232764005661, 0.005300120450556278, 0.06954264640808105, -0.191916823387146, -0.0434846468269825, 0.10781621187925339, -0.05910440534353256, -0.08123326301574707, 0.07668182253837585, -0.02658126689493656, -0.06547138094902039, 0.08373209089040756, 0.3023380637168884, 0.17959655821323395, 0.0666690245270729, 0.048452459275722504, 0.007764472626149654, -0.13836915791034698, -0.18820254504680634, 0.0435599759221077, -0.00028190232114866376, 
-0.08828116208314896, 0.06898128241300583, -0.07044381648302078, 0.019675830379128456, -0.05611138790845871, -0.05097328871488571, 0.015769898891448975, -0.10570398718118668, -0.012712866067886353, -0.0950084701180458, 0.02113904058933258, -0.01755773089826107, 0.07663144171237946, 0.029066860675811768, 0.08039570599794388, -0.0685940682888031, 0.02817319706082344, -0.09119414538145065, 0.24856583774089813, 0.126658633351326, 0.1326770931482315, -0.1604938805103302, -0.11877133697271347, -0.011321929283440113, -0.030425747856497765, 0.05486473813652992, 0.03087623417377472, -0.004633840639144182, 0.018314722925424576, -0.05310552194714546, 0.033672183752059937, 0.12909315526485443, -0.06656035035848618, 0.014064816758036613, -0.009622029028832912, -0.11489856988191605, -0.07509525865316391, -0.09516884386539459, -0.019465696066617966, -0.02283311076462269, -0.00032658741110935807, -0.020960798487067223, -0.0582701712846756, -0.02534031681716442, 0.06462171673774719, 0.11169987171888351, 0.00035487691638991237, -0.0018569834064692259, 0.07599157840013504, -0.111236073076725, -0.12221195548772812, 0.06357225030660629, -0.1882413774728775, -0.04118359833955765, 0.10908643901348114, -0.09227891266345978, -0.006765300873667002, -0.06934245675802231, 0.06538175046443939, -0.0008298095199279487, -0.013916787691414356, -0.04158841073513031, 0.003335871733725071, -0.0016813960392028093, 0.12807364761829376, -0.06577562540769577, -0.01617971621453762, -0.08664894104003906, -0.015444123186171055, -0.07197128981351852, 0.10287836194038391, 0.11816112697124481, -0.031793493777513504, 0.06455781310796738, 0.014631899073719978, 0.0038505049888044596, 0.13187894225120544, 0.03735363483428955, -0.05301141366362572, -0.02921278588473797, 0.11855585873126984, -0.00007683198782615364, 0.003358988557010889, 0.0011325279483571649, -0.10987520962953568, 0.013463438488543034, -0.0024448430631309748, 0.0739365890622139, -0.12708327174186707, -0.057145681232213974, 0.023173972964286804, 
-0.14269347488880157, -0.14730188250541687, 0.07595910131931305, -0.04174686223268509, 0.07327283918857574, 0.015412872657179832, -0.08647952228784561, 0.06519445776939392, -0.026003755629062653, -0.019643094390630722, 0.1506388634443283, -0.12502141296863556, -0.24256865680217743, -0.22015249729156494, -0.19197633862495422, -0.07691800594329834, 0.08422505855560303, 0.054857198148965836, -0.14604122936725616, -0.02631588652729988, -0.04993775486946106, 0.060579217970371246, -0.21819448471069336, -0.02253914438188076, -0.056136712431907654, -0.009439945220947266, -0.06393370032310486, -0.04653644934296608, -0.045878440141677856, -0.01754908636212349, 0.05570903792977333, 0.12299801409244537, -0.11191052943468094, 0.1632532775402069, 0.16921433806419373, -0.07818620651960373, 0.08849143236875534, -0.05410080403089523, 0.0957077294588089, 0.019343605265021324, -0.10197239369153976, 0.1536867320537567, -0.09846831858158112, 0.08403772860765457, 0.014151109382510185, 0.027552708983421326, -0.1551719307899475, 0.022848740220069885, 0.0060127051547169685, -0.18755657970905304, -0.13147613406181335, -0.024099446833133698, -0.04153987765312195, 0.1264384537935257, -0.00395018607378006, 0.08698423206806183, 0.07982391864061356, 0.02677212283015251, 0.07068051397800446, -0.021636363118886948, -0.13902978599071503, 0.02867881953716278, 0.09261840581893921, -0.038502614945173264, 0.012844600714743137, -0.06988305598497391, 0.028865208849310875, 0.02039814181625843, 0.04993052780628204, 0.3000648021697998, 0.12178889662027359, 0.23597782850265503, 0.08922083675861359, 0.05841897055506706, 0.07228844612836838, 0.062078818678855896, -0.025668539106845856, 0.01103219948709011, -0.011988602578639984, -0.016201190650463104, -0.022776378318667412, 0.06337510049343109, 0.1368456780910492, -0.1074792668223381, -0.10633019357919693, 0.0699475109577179, -0.0005862282123416662, -0.07872460037469864, -0.030255289748311043, -0.14633473753929138, 0.10936856269836426, 0.00988011434674263, 
0.007567887660115957, -0.009780243039131165, 0.09074823558330536, 0.09015212208032608, 0.021315306425094604, -0.020453743636608124, 0.027505692094564438, 0.12681253254413605, -0.04052678868174553, 0.08318793028593063, 0.006632683798670769, 0.051659829914569855, -0.02707352489233017, 0.14610449969768524, -0.19051110744476318, 0.20627819001674652, 0.024090789258480072, -0.07319832593202591, -0.06570631265640259, -0.06984102725982666, 0.01096680760383606, -0.018879802897572517, 0.1033252477645874, -0.006215987727046013, -0.24843057990074158, -0.26328057050704956, 0.06025344133377075, -0.0454697385430336, 0.22179310023784637, 0.033833105117082596, 0.1040005013346672, 0.02395009808242321, 0.00621773861348629, -0.058107923716306686, -0.0032760994508862495, 0.03090393915772438, -0.06123372167348862, 0.056571051478385925, 0.10808423906564713, 0.039857324212789536, 0.010004807263612747, 0.09006445854902267, -0.009278865531086922, -0.010072493925690651, 0.13184936344623566, -0.026258030906319618, -0.11563839763402939, -0.006791927386075258, 0.14716365933418274, -0.08835726976394653, 0.02282167784869671, 0.0018484331667423248, -0.0008671967079862952, -0.025565842166543007, -0.1785401999950409, -0.04781420901417732, -0.01758449897170067, 0.05546316131949425, 0.018464578315615654, 0.09391865879297256, 0.012194323353469372, 0.07323359698057175, 0.014044782146811485, -0.007748867850750685, -0.009418155997991562, -0.10855796933174133, 0.06353693455457687, -0.09504811465740204, 0.026438992470502853, 0.1444544643163681, -0.01718783564865589, 0.2622814476490021, -0.08502765744924545, -0.03102131560444832, 0.18071380257606506, 0.2059926688671112, -0.04093499481678009, 0.13671287894248962, 0.08727891743183136, -0.09517326205968857, -0.22776441276073456, -0.10729128122329712, -0.11656279116868973, 0.03499699756503105, 0.047178965061903, -0.2813754379749298, 0.009635290130972862, 0.06582057476043701, 0.05372308939695358, 0.14917829632759094, -0.20300258696079254, -0.06428898870944977, 
0.14128094911575317, 0.05393323674798012, 0.16407065093517303, -0.17827405035495758, -0.12269742786884308, -0.09380382299423218, 0.0070589641109108925, 0.01034032367169857, 0.002245594747364521, 0.14710241556167603, -0.12016802281141281, 0.15102803707122803, 0.04169163107872009, 0.0018528877990320325, 0.19085942208766937, 0.11687633395195007, 0.14584536850452423, -0.0589001290500164, -0.06391072273254395, -0.041535504162311554, -0.10049768537282944, 0.15667606890201569, -0.15740671753883362, -0.015870120376348495, -0.1299719661474228, -0.052997034043073654, -0.0312039777636528, -0.03976420313119888, 0.046658143401145935, 0.026149136945605278, -0.09746821224689484, -0.01150861568748951, -0.044735368341207504, 0.025303058326244354, 0.03594053536653519, -0.10024256259202957, 0.04110310226678848, 0.03702373802661896, 0.10943644493818283, 0.00666081253439188, -0.0638040229678154, 0.01762758195400238, -0.020419659093022346, 0.07065846025943756, -0.15789976716041565, -0.013733156025409698, 0.07677501440048218, 0.01120146457105875, 0.11596323549747467, 0.09661834686994553, -0.03654090315103531, 0.02602020837366581, 0.1298966109752655, -0.0814407616853714, -0.21304196119308472, -0.01805686764419079, -0.04099993407726288, -0.019883032888174057, 0.000020818204575334676, -0.015115845948457718, 0.022773075848817825, 0.035326406359672546, -0.006354701705276966, 0.0018953392282128334, -0.055364254862070084, 0.12576824426651, 0.10502107441425323, 0.07750248163938522, -0.12452007830142975, 0.09029431641101837, -0.035200946033000946, 0.007095908746123314, 0.06578464061021805, 0.025991037487983704, -0.03762725740671158, -0.09485697746276855, -0.10667754709720612, 0.16138996183872223, -0.10372266173362732, -0.12852026522159576, -0.11327184736728668, -0.0808357372879982, 0.07077200710773468, 0.24473810195922852, 0.07374601811170578, 0.06454015523195267, 0.00908674392849207, 0.012496797367930412, -0.060997869819402695, -0.02607119455933571, 0.022588057443499565, 0.009479843080043793, 
-0.08371126651763916, 0.12432586401700974, -0.05790229141712189, 0.17618218064308167, -0.0295632965862751, -0.06062886118888855, -0.1541072279214859, 0.12407248467206955, -0.22136765718460083, -0.030553823336958885, -0.07676298171281815, -0.03403301537036896, 0.10425422340631485, 0.05986711010336876, 0.016028231009840965, 0.007679690141230822, -0.16900312900543213, -0.010523445904254913, -0.018982980400323868, 0.0034821005538105965, -0.05375578999519348, 0.0042333947494626045, 0.013573000207543373, 0.050999343395233154, 0.018428845331072807, 0.05798639729619026, -0.07226607948541641, 0.1392284780740738, -0.08742956072092056, -0.06411392241716385, 0.07366472482681274, 0.04235317185521126, -0.012869549915194511, -0.013043452054262161, -0.07775567471981049, 0.01580367423593998, -0.04465727508068085, 0.06774137169122696, -0.11245329678058624, -0.10575758665800095, -0.021175123751163483, -0.06987812370061874, -0.024019300937652588, -0.02194424904882908, 0.015142332762479782, 0.15245933830738068, 0.09281967580318451, 0.041944753378629684, 0.0735815167427063, 0.05867539346218109, 0.042483191937208176, 0.012467725202441216, 0.008644692599773407, -0.07366106659173965, -0.02482520043849945, -0.08086390048265457, 0.041583310812711716, -0.00015286501729860902, 0.2143721580505371, -0.040446821600198746, -0.10968970507383347, -0.06671210378408432, 0.18256351351737976, 0.07810971885919571, 0.00544319162145257, 0.19504688680171967, 0.042692333459854126, -0.049263209104537964, 0.06084749847650528, 0.08873312920331955, 0.029053468257188797, 0.053485628217458725, 0.2852826416492462, 0.011053617112338543, 0.09489756077528, 0.07843167334794998, 0.039588890969753265, -0.022951066493988037, 0.08096326142549515, -0.11148568987846375, 0.03337554633617401, 0.021054266020655632, 0.0179795790463686, 0.08465450257062912, 0.05024213716387749, -0.018269283697009087, 0.15614429116249084, 0.015725964680314064, -0.09421806782484055, -0.11467579007148743, -0.1164972260594368, -0.00286362343467772, 
-0.12823078036308289, 0.005329763516783714, -0.06600409746170044, 0.045727360993623734, 0.19084107875823975, 0.01909526437520981, -0.04133403301239014, 0.12405611574649811, -0.14068149030208588, 0.006811921950429678, 0.008372497744858265, -0.11198074370622635, -0.016253933310508728, 0.09162997454404831, -0.02262238971889019, 0.05712288245558739, -0.11400953680276871, 0.011225594207644463, 0.08616369217634201, 0.07114222645759583, 0.05195854604244232, -0.1890062540769577, -0.03907293453812599, -0.041219305247068405, 0.03011961653828621, 0.030732210725545883, 0.1610664278268814, -0.019175708293914795, -0.12757651507854462, -0.016411272808909416, 0.006498666014522314, 0.035395413637161255, 0.04683932289481163, 0.0245804600417614, 0.22189827263355255, 0.08498822152614594, -0.02848583459854126, -0.07695996016263962, -0.06052684411406517, -0.0019859473686665297, 0.2731577157974243, 0.1799187958240509, -0.02020246908068657, -0.02756051905453205, -0.03957264870405197, 0.04222164303064346, 0.05729708448052406, 0.15119804441928864, -0.056998979300260544, 0.04665118455886841, 0.030420199036598206, 0.0021827714517712593, -0.04611418396234512, -0.055008161813020706, -0.05383661389350891, -0.04182818531990051, 0.13611240684986115, -0.04092879593372345, -0.11742986738681793, 0.070139579474926, -0.34956008195877075, 0.1518256962299347, 0.0430704765021801, 0.024968814104795456, 0.004413212649524212, -0.07345905900001526, 0.013821635395288467, 0.054244983941316605, 0.046628326177597046, -0.10672304034233093, 0.04910362511873245, 0.05302300676703453, 0.0007530212169513106, -0.3334277272224426, -0.18243423104286194, 0.05747940391302109, 0.08530910313129425, -0.0011961469426751137, 0.0007686003809794784, 0.016691982746124268, 0.006919745355844498, -0.024940893054008484, -0.09413997083902359, 0.06885472685098648, -0.014398960396647453, -0.005886690691113472, 0.03978407010436058, 0.1604994237422943, -0.015658678486943245, 0.0009249836439266801, -0.06318032741546631, 0.04019147530198097, 
-0.03337319567799568, 0.04975513368844986, 0.052391692996025085, -0.039566073566675186, -0.019049495458602905, -0.0697593167424202, 0.06905382126569748, 0.19486922025680542, 0.009813993237912655, 0.025150012224912643, 0.0349227637052536, 0.02385552041232586, 0.017607955262064934, -0.06683193892240524, -0.08650453388690948, -0.027452396228909492, -0.05854417383670807, 0.03710918873548508, -0.08514250069856644, -0.24434992671012878, -0.00933748483657837, -0.023151518777012825, 0.0330389030277729, 0.035056062042713165, 0.09047845751047134, 0.1555381864309311, 0.051657263189554214, -0.03518655151128769, -0.05245732143521309, 0.051691118627786636, 0.051476433873176575, -0.1164458692073822, -0.10495167225599289 ]
5df6805b27784dfab225669c8578993e71269e7d
# Dataset Card for Evaluation run of ibivibiv/bubo-bubo-13b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [ibivibiv/bubo-bubo-13b](https://huggingface.co/ibivibiv/bubo-bubo-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ibivibiv__bubo-bubo-13b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T01:47:11.871691](https://huggingface.co/datasets/open-llm-leaderboard/details_ibivibiv__bubo-bubo-13b/blob/main/results_2024-01-25T01-47-11.871691.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.579149139810157, "acc_stderr": 0.03345275342485905, "acc_norm": 0.5866287285681321, "acc_norm_stderr": 0.03417205569884806, "mc1": 0.3329253365973072, "mc1_stderr": 0.016497402382012052, "mc2": 0.4762440289139372, "mc2_stderr": 0.014987437363380035 }, "harness|arc:challenge|25": { "acc": 0.5631399317406144, "acc_stderr": 0.014494421584256515, "acc_norm": 0.6143344709897611, "acc_norm_stderr": 0.014224250973257177 }, "harness|hellaswag|10": { "acc": 0.6317466640111532, "acc_stderr": 0.004813448615404432, "acc_norm": 0.8314080860386377, "acc_norm_stderr": 0.0037362592995204874 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5481481481481482, "acc_stderr": 0.04299268905480864, "acc_norm": 0.5481481481481482, "acc_norm_stderr": 0.04299268905480864 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5657894736842105, "acc_stderr": 0.04033565667848319, "acc_norm": 0.5657894736842105, "acc_norm_stderr": 0.04033565667848319 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.55, "acc_stderr": 0.049999999999999996, "acc_norm": 0.55, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6, "acc_stderr": 0.030151134457776292, "acc_norm": 0.6, "acc_norm_stderr": 0.030151134457776292 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6388888888888888, "acc_stderr": 0.04016660030451233, "acc_norm": 0.6388888888888888, "acc_norm_stderr": 0.04016660030451233 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5260115606936416, "acc_stderr": 0.038073017265045125, "acc_norm": 0.5260115606936416, "acc_norm_stderr": 0.038073017265045125 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3431372549019608, "acc_stderr": 0.04724007352383887, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.04724007352383887 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.71, "acc_stderr": 0.04560480215720685, "acc_norm": 0.71, "acc_norm_stderr": 0.04560480215720685 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.49361702127659574, "acc_stderr": 0.032683358999363366, "acc_norm": 0.49361702127659574, "acc_norm_stderr": 0.032683358999363366 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.35964912280701755, "acc_stderr": 0.045144961328736334, "acc_norm": 0.35964912280701755, "acc_norm_stderr": 0.045144961328736334 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5586206896551724, "acc_stderr": 0.04137931034482757, "acc_norm": 0.5586206896551724, "acc_norm_stderr": 0.04137931034482757 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3439153439153439, "acc_stderr": 0.024464426625596426, "acc_norm": 0.3439153439153439, "acc_norm_stderr": 0.024464426625596426 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3333333333333333, "acc_stderr": 0.042163702135578345, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.042163702135578345 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6903225806451613, "acc_stderr": 0.026302774983517414, "acc_norm": 0.6903225806451613, "acc_norm_stderr": 0.026302774983517414 }, "harness|hendrycksTest-high_school_chemistry|5": { 
"acc": 0.45320197044334976, "acc_stderr": 0.035025446508458714, "acc_norm": 0.45320197044334976, "acc_norm_stderr": 0.035025446508458714 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6787878787878788, "acc_stderr": 0.0364620496325381, "acc_norm": 0.6787878787878788, "acc_norm_stderr": 0.0364620496325381 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7424242424242424, "acc_stderr": 0.03115626951964683, "acc_norm": 0.7424242424242424, "acc_norm_stderr": 0.03115626951964683 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8341968911917098, "acc_stderr": 0.026839845022314415, "acc_norm": 0.8341968911917098, "acc_norm_stderr": 0.026839845022314415 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.558974358974359, "acc_stderr": 0.02517404838400075, "acc_norm": 0.558974358974359, "acc_norm_stderr": 0.02517404838400075 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3, "acc_stderr": 0.027940457136228416, "acc_norm": 0.3, "acc_norm_stderr": 0.027940457136228416 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5672268907563025, "acc_stderr": 0.03218358107742613, "acc_norm": 0.5672268907563025, "acc_norm_stderr": 0.03218358107742613 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7577981651376147, "acc_stderr": 0.01836817630659862, "acc_norm": 0.7577981651376147, "acc_norm_stderr": 0.01836817630659862 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4212962962962963, "acc_stderr": 0.03367462138896079, "acc_norm": 0.4212962962962963, "acc_norm_stderr": 0.03367462138896079 }, 
"harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8186274509803921, "acc_stderr": 0.02704462171947408, "acc_norm": 0.8186274509803921, "acc_norm_stderr": 0.02704462171947408 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.759493670886076, "acc_stderr": 0.027820781981149685, "acc_norm": 0.759493670886076, "acc_norm_stderr": 0.027820781981149685 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6547085201793722, "acc_stderr": 0.03191100192835794, "acc_norm": 0.6547085201793722, "acc_norm_stderr": 0.03191100192835794 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6412213740458015, "acc_stderr": 0.04206739313864908, "acc_norm": 0.6412213740458015, "acc_norm_stderr": 0.04206739313864908 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6776859504132231, "acc_stderr": 0.042664163633521685, "acc_norm": 0.6776859504132231, "acc_norm_stderr": 0.042664163633521685 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.75, "acc_stderr": 0.04186091791394607, "acc_norm": 0.75, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6993865030674846, "acc_stderr": 0.03602511318806771, "acc_norm": 0.6993865030674846, "acc_norm_stderr": 0.03602511318806771 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.04684099321077106, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.04684099321077106 }, "harness|hendrycksTest-management|5": { "acc": 0.7281553398058253, "acc_stderr": 0.044052680241409216, "acc_norm": 0.7281553398058253, "acc_norm_stderr": 0.044052680241409216 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8504273504273504, "acc_stderr": 0.023365051491753715, "acc_norm": 0.8504273504273504, "acc_norm_stderr": 0.023365051491753715 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 
0.7624521072796935, "acc_stderr": 0.015218733046150195, "acc_norm": 0.7624521072796935, "acc_norm_stderr": 0.015218733046150195 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6560693641618497, "acc_stderr": 0.025574123786546648, "acc_norm": 0.6560693641618497, "acc_norm_stderr": 0.025574123786546648 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4346368715083799, "acc_stderr": 0.016578997435496713, "acc_norm": 0.4346368715083799, "acc_norm_stderr": 0.016578997435496713 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.673202614379085, "acc_stderr": 0.02685729466328141, "acc_norm": 0.673202614379085, "acc_norm_stderr": 0.02685729466328141 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7009646302250804, "acc_stderr": 0.026003301117885142, "acc_norm": 0.7009646302250804, "acc_norm_stderr": 0.026003301117885142 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7067901234567902, "acc_stderr": 0.025329888171900926, "acc_norm": 0.7067901234567902, "acc_norm_stderr": 0.025329888171900926 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4645390070921986, "acc_stderr": 0.029752389657427047, "acc_norm": 0.4645390070921986, "acc_norm_stderr": 0.029752389657427047 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.45697522816166886, "acc_stderr": 0.012722869501611419, "acc_norm": 0.45697522816166886, "acc_norm_stderr": 0.012722869501611419 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5514705882352942, "acc_stderr": 0.0302114796091216, "acc_norm": 0.5514705882352942, "acc_norm_stderr": 0.0302114796091216 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6013071895424836, "acc_stderr": 0.01980828131744984, "acc_norm": 0.6013071895424836, "acc_norm_stderr": 0.01980828131744984 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, 
"harness|hendrycksTest-security_studies|5": { "acc": 0.6448979591836734, "acc_stderr": 0.030635655150387638, "acc_norm": 0.6448979591836734, "acc_norm_stderr": 0.030635655150387638 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7611940298507462, "acc_stderr": 0.03014777593540922, "acc_norm": 0.7611940298507462, "acc_norm_stderr": 0.03014777593540922 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774708, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774708 }, "harness|hendrycksTest-virology|5": { "acc": 0.4819277108433735, "acc_stderr": 0.038899512528272166, "acc_norm": 0.4819277108433735, "acc_norm_stderr": 0.038899512528272166 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7894736842105263, "acc_stderr": 0.03126781714663179, "acc_norm": 0.7894736842105263, "acc_norm_stderr": 0.03126781714663179 }, "harness|truthfulqa:mc|0": { "mc1": 0.3329253365973072, "mc1_stderr": 0.016497402382012052, "mc2": 0.4762440289139372, "mc2_stderr": 0.014987437363380035 }, "harness|winogrande|5": { "acc": 0.7616416732438832, "acc_stderr": 0.01197494866770231 }, "harness|gsm8k|5": { "acc": 0.20621683093252463, "acc_stderr": 0.01114436408978147 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_ibivibiv__bubo-bubo-13b
[ "region:us" ]
2024-01-25T01:49:34+00:00
{"pretty_name": "Evaluation run of ibivibiv/bubo-bubo-13b", "dataset_summary": "Dataset automatically created during the evaluation run of model [ibivibiv/bubo-bubo-13b](https://huggingface.co/ibivibiv/bubo-bubo-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ibivibiv__bubo-bubo-13b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T01:47:11.871691](https://huggingface.co/datasets/open-llm-leaderboard/details_ibivibiv__bubo-bubo-13b/blob/main/results_2024-01-25T01-47-11.871691.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.579149139810157,\n \"acc_stderr\": 0.03345275342485905,\n \"acc_norm\": 0.5866287285681321,\n \"acc_norm_stderr\": 0.03417205569884806,\n \"mc1\": 0.3329253365973072,\n \"mc1_stderr\": 0.016497402382012052,\n \"mc2\": 0.4762440289139372,\n \"mc2_stderr\": 0.014987437363380035\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5631399317406144,\n \"acc_stderr\": 0.014494421584256515,\n \"acc_norm\": 0.6143344709897611,\n \"acc_norm_stderr\": 0.014224250973257177\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6317466640111532,\n \"acc_stderr\": 0.004813448615404432,\n \"acc_norm\": 0.8314080860386377,\n \"acc_norm_stderr\": 0.0037362592995204874\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5481481481481482,\n \"acc_stderr\": 0.04299268905480864,\n \"acc_norm\": 0.5481481481481482,\n \"acc_norm_stderr\": 0.04299268905480864\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5657894736842105,\n \"acc_stderr\": 0.04033565667848319,\n \"acc_norm\": 0.5657894736842105,\n \"acc_norm_stderr\": 0.04033565667848319\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.030151134457776292,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.030151134457776292\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6388888888888888,\n \"acc_stderr\": 0.04016660030451233,\n \"acc_norm\": 0.6388888888888888,\n \"acc_norm_stderr\": 0.04016660030451233\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 
0.04878317312145632,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5260115606936416,\n \"acc_stderr\": 0.038073017265045125,\n \"acc_norm\": 0.5260115606936416,\n \"acc_norm_stderr\": 0.038073017265045125\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3431372549019608,\n \"acc_stderr\": 0.04724007352383887,\n \"acc_norm\": 0.3431372549019608,\n \"acc_norm_stderr\": 0.04724007352383887\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.04560480215720685,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.04560480215720685\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.49361702127659574,\n \"acc_stderr\": 0.032683358999363366,\n \"acc_norm\": 0.49361702127659574,\n \"acc_norm_stderr\": 0.032683358999363366\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.35964912280701755,\n \"acc_stderr\": 0.045144961328736334,\n \"acc_norm\": 0.35964912280701755,\n \"acc_norm_stderr\": 0.045144961328736334\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482757,\n \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482757\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3439153439153439,\n \"acc_stderr\": 0.024464426625596426,\n \"acc_norm\": 0.3439153439153439,\n \"acc_norm_stderr\": 0.024464426625596426\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.042163702135578345,\n 
\"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.042163702135578345\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6903225806451613,\n \"acc_stderr\": 0.026302774983517414,\n \"acc_norm\": 0.6903225806451613,\n \"acc_norm_stderr\": 0.026302774983517414\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.45320197044334976,\n \"acc_stderr\": 0.035025446508458714,\n \"acc_norm\": 0.45320197044334976,\n \"acc_norm_stderr\": 0.035025446508458714\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6787878787878788,\n \"acc_stderr\": 0.0364620496325381,\n \"acc_norm\": 0.6787878787878788,\n \"acc_norm_stderr\": 0.0364620496325381\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7424242424242424,\n \"acc_stderr\": 0.03115626951964683,\n \"acc_norm\": 0.7424242424242424,\n \"acc_norm_stderr\": 0.03115626951964683\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8341968911917098,\n \"acc_stderr\": 0.026839845022314415,\n \"acc_norm\": 0.8341968911917098,\n \"acc_norm_stderr\": 0.026839845022314415\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.558974358974359,\n \"acc_stderr\": 0.02517404838400075,\n \"acc_norm\": 0.558974358974359,\n \"acc_norm_stderr\": 0.02517404838400075\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.027940457136228416,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.027940457136228416\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 
0.5672268907563025,\n \"acc_stderr\": 0.03218358107742613,\n \"acc_norm\": 0.5672268907563025,\n \"acc_norm_stderr\": 0.03218358107742613\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7577981651376147,\n \"acc_stderr\": 0.01836817630659862,\n \"acc_norm\": 0.7577981651376147,\n \"acc_norm_stderr\": 0.01836817630659862\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4212962962962963,\n \"acc_stderr\": 0.03367462138896079,\n \"acc_norm\": 0.4212962962962963,\n \"acc_norm_stderr\": 0.03367462138896079\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8186274509803921,\n \"acc_stderr\": 0.02704462171947408,\n \"acc_norm\": 0.8186274509803921,\n \"acc_norm_stderr\": 0.02704462171947408\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.759493670886076,\n \"acc_stderr\": 0.027820781981149685,\n \"acc_norm\": 0.759493670886076,\n \"acc_norm_stderr\": 0.027820781981149685\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6547085201793722,\n \"acc_stderr\": 0.03191100192835794,\n \"acc_norm\": 0.6547085201793722,\n \"acc_norm_stderr\": 0.03191100192835794\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6412213740458015,\n \"acc_stderr\": 0.04206739313864908,\n \"acc_norm\": 0.6412213740458015,\n \"acc_norm_stderr\": 0.04206739313864908\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6776859504132231,\n \"acc_stderr\": 0.042664163633521685,\n \"acc_norm\": 0.6776859504132231,\n \"acc_norm_stderr\": 0.042664163633521685\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n 
\"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6993865030674846,\n \"acc_stderr\": 0.03602511318806771,\n \"acc_norm\": 0.6993865030674846,\n \"acc_norm_stderr\": 0.03602511318806771\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7281553398058253,\n \"acc_stderr\": 0.044052680241409216,\n \"acc_norm\": 0.7281553398058253,\n \"acc_norm_stderr\": 0.044052680241409216\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8504273504273504,\n \"acc_stderr\": 0.023365051491753715,\n \"acc_norm\": 0.8504273504273504,\n \"acc_norm_stderr\": 0.023365051491753715\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7624521072796935,\n \"acc_stderr\": 0.015218733046150195,\n \"acc_norm\": 0.7624521072796935,\n \"acc_norm_stderr\": 0.015218733046150195\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6560693641618497,\n \"acc_stderr\": 0.025574123786546648,\n \"acc_norm\": 0.6560693641618497,\n \"acc_norm_stderr\": 0.025574123786546648\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4346368715083799,\n \"acc_stderr\": 0.016578997435496713,\n \"acc_norm\": 0.4346368715083799,\n \"acc_norm_stderr\": 0.016578997435496713\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.673202614379085,\n \"acc_stderr\": 0.02685729466328141,\n \"acc_norm\": 0.673202614379085,\n \"acc_norm_stderr\": 0.02685729466328141\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7009646302250804,\n \"acc_stderr\": 0.026003301117885142,\n \"acc_norm\": 0.7009646302250804,\n \"acc_norm_stderr\": 0.026003301117885142\n },\n 
\"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7067901234567902,\n \"acc_stderr\": 0.025329888171900926,\n \"acc_norm\": 0.7067901234567902,\n \"acc_norm_stderr\": 0.025329888171900926\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4645390070921986,\n \"acc_stderr\": 0.029752389657427047,\n \"acc_norm\": 0.4645390070921986,\n \"acc_norm_stderr\": 0.029752389657427047\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.45697522816166886,\n \"acc_stderr\": 0.012722869501611419,\n \"acc_norm\": 0.45697522816166886,\n \"acc_norm_stderr\": 0.012722869501611419\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5514705882352942,\n \"acc_stderr\": 0.0302114796091216,\n \"acc_norm\": 0.5514705882352942,\n \"acc_norm_stderr\": 0.0302114796091216\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6013071895424836,\n \"acc_stderr\": 0.01980828131744984,\n \"acc_norm\": 0.6013071895424836,\n \"acc_norm_stderr\": 0.01980828131744984\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6448979591836734,\n \"acc_stderr\": 0.030635655150387638,\n \"acc_norm\": 0.6448979591836734,\n \"acc_norm_stderr\": 0.030635655150387638\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7611940298507462,\n \"acc_stderr\": 0.03014777593540922,\n \"acc_norm\": 0.7611940298507462,\n \"acc_norm_stderr\": 0.03014777593540922\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774708,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774708\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4819277108433735,\n \"acc_stderr\": 0.038899512528272166,\n \"acc_norm\": 0.4819277108433735,\n \"acc_norm_stderr\": 
0.038899512528272166\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7894736842105263,\n \"acc_stderr\": 0.03126781714663179,\n \"acc_norm\": 0.7894736842105263,\n \"acc_norm_stderr\": 0.03126781714663179\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3329253365973072,\n \"mc1_stderr\": 0.016497402382012052,\n \"mc2\": 0.4762440289139372,\n \"mc2_stderr\": 0.014987437363380035\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7616416732438832,\n \"acc_stderr\": 0.01197494866770231\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.20621683093252463,\n \"acc_stderr\": 0.01114436408978147\n }\n}\n```", "repo_url": "https://huggingface.co/ibivibiv/bubo-bubo-13b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|arc:challenge|25_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|gsm8k|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hellaswag|10_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T01-47-11.871691.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T01-47-11.871691.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T01-47-11.871691.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T01-47-11.871691.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T01-47-11.871691.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T01-47-11.871691.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T01-47-11.871691.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T01-47-11.871691.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["**/details_harness|winogrande|5_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T01-47-11.871691.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T01_47_11.871691", "path": ["results_2024-01-25T01-47-11.871691.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T01-47-11.871691.parquet"]}]}]}
2024-01-25T01:50:01+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ibivibiv/bubo-bubo-13b Dataset automatically created during the evaluation run of model ibivibiv/bubo-bubo-13b on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T01:47:11.871691 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of ibivibiv/bubo-bubo-13b\n\n\n\nDataset automatically created during the evaluation run of model ibivibiv/bubo-bubo-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T01:47:11.871691(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ibivibiv/bubo-bubo-13b\n\n\n\nDataset automatically created during the evaluation run of model ibivibiv/bubo-bubo-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T01:47:11.871691(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 185, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ibivibiv/bubo-bubo-13b\n\n\n\nDataset automatically created during the evaluation run of model ibivibiv/bubo-bubo-13b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T01:47:11.871691(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.04367582127451897, 0.20187446475028992, -0.005289841443300247, 0.036862894892692566, 0.07474914193153381, -0.00926203466951847, 0.05812988430261612, 0.10262764990329742, 0.035288166254758835, 0.18853792548179626, -0.029262393712997437, 0.10682475566864014, 0.06841255724430084, 0.12510241568088531, 0.030087696388363838, -0.11600703746080399, 0.025652213022112846, -0.08289884775876999, 0.11461880058050156, 0.0826474204659462, 0.05745901167392731, -0.0721781849861145, 0.060876376926898956, -0.026790793985128403, 0.03334667906165123, -0.012290197424590588, -0.07918428629636765, -0.0295427106320858, 0.0940263643860817, 0.10907205194234848, 0.03412051126360893, -0.01813427358865738, 0.021231388673186302, -0.2742544412612915, 0.013830373995006084, 0.08936676383018494, -0.015075631439685822, 0.03425414487719536, 0.12861469388008118, -0.07987545430660248, 0.08576842397451401, -0.013246459886431694, 0.07941371202468872, 0.057531800121068954, -0.11745230108499527, -0.14313893020153046, -0.14722192287445068, 0.022789914160966873, 0.04593696445226669, 0.037598151713609695, -0.029267964884638786, 0.15081113576889038, -0.07445716112852097, 0.04723365977406502, 0.1309785097837448, -0.09526980668306351, -0.01393823977559805, 0.058675430715084076, 0.016743432730436325, 0.08051060885190964, -0.08391722291707993, -0.030260391533374786, 0.031766217201948166, 0.05180393159389496, -0.016752202063798904, 0.01656498946249485, -0.017527444288134575, 0.010618109256029129, -0.13754433393478394, -0.13612036406993866, 0.1319848746061325, 0.004619064275175333, -0.0552169531583786, -0.17819121479988098, -0.008337290026247501, 0.017122134566307068, -0.00825921818614006, 0.005006666295230389, -0.005317288916558027, -0.01746675744652748, 0.09716270118951797, 0.0005532022914849222, -0.09348057955503464, -0.03502291440963745, 0.00111784425098449, 0.08227119594812393, 0.024621181190013885, -0.006543344352394342, 0.009379774332046509, 0.11404961347579956, 0.008338271640241146, -0.06231973320245743, 
-0.07085031270980835, -0.05360507592558861, -0.11172006279230118, -0.038740478456020355, 0.019896015524864197, -0.06115315854549408, 0.03810495510697365, 0.24498236179351807, -0.004587515257298946, 0.02112254686653614, -0.118554025888443, 0.009957822039723396, 0.12450455129146576, 0.05571024492383003, -0.08134127408266068, -0.044284380972385406, -0.032750800251960754, 0.028923876583576202, 0.03039337322115898, -0.009717819280922413, 0.008576168678700924, 0.06141180917620659, 0.018377576023340225, 0.12237747758626938, 0.12066683173179626, 0.027030766010284424, -0.08415152132511139, -0.02146783657371998, 0.24825289845466614, -0.13600318133831024, -0.015030715614557266, 0.021378977224230766, -0.036312371492385864, -0.13078710436820984, 0.0700235441327095, -0.011573106050491333, -0.054762087762355804, 0.12154269218444824, -0.043625812977552414, -0.07825508713722229, -0.07260412722826004, -0.06887111067771912, 0.055848393589258194, 0.004403560422360897, -0.046958357095718384, -0.06746676564216614, -0.10558632016181946, -0.08071961998939514, 0.026713654398918152, -0.07318007946014404, -0.01538478210568428, 0.023544276133179665, 0.003909370396286249, -0.014202999882400036, -0.014706943184137344, 0.11077635735273361, -0.06488966196775436, 0.03948333486914635, -0.007247008383274078, 0.02318212017416954, 0.10036206990480423, 0.03854690119624138, -0.11526614427566528, 0.07539860159158707, -0.12399142235517502, 0.1052076667547226, -0.1204107254743576, -0.028409015387296677, -0.11929769814014435, 0.0000760436087148264, -0.03126067668199539, 0.04468374699354172, -0.03257458284497261, 0.0862184539437294, -0.19708877801895142, -0.003886670107021928, 0.1698591411113739, -0.12005533277988434, -0.07561647146940231, 0.09052933752536774, -0.048151325434446335, 0.07128691673278809, 0.044647734612226486, 0.09198182821273804, 0.10110343247652054, -0.08238928020000458, -0.09581559896469116, -0.057782117277383804, -0.023023026064038277, 0.16300302743911743, 0.06715917587280273, 
-0.07579459249973297, 0.09702741354703903, 0.04674573242664337, -0.003096739761531353, -0.06180490553379059, -0.0021379520185291767, -0.0640130415558815, -0.018827905878424644, -0.07149851322174072, -0.052031923085451126, -0.0022827007342129946, -0.07239828258752823, -0.014618744142353535, -0.08359991014003754, -0.01595338247716427, 0.10225816816091537, -0.019670519977808, 0.008053196594119072, -0.0736260637640953, 0.028544727712869644, -0.004568005446344614, 0.013550077565014362, -0.21869982779026031, -0.07050391286611557, 0.03569059446454048, -0.20158499479293823, 0.0625995323061943, 0.037092264741659164, 0.011089446023106575, 0.048987504094839096, -0.00756264291703701, 0.029318461194634438, 0.019278082996606827, -0.016710292547941208, -0.011349953711032867, -0.14453496038913727, -0.056966476142406464, -0.09045249968767166, 0.09642373025417328, -0.134918674826622, -0.01126057654619217, 0.0444665364921093, 0.14941589534282684, 0.021255575120449066, -0.08223561197519302, 0.061449311673641205, 0.010383493266999722, -0.044650886207818985, -0.04801074042916298, -0.007674726657569408, -0.03418396785855293, 0.038126908242702484, 0.031001873314380646, -0.18284399807453156, -0.10467855632305145, 0.0653882548213005, 0.14315254986286163, -0.06582482904195786, -0.08241444081068039, -0.06334497779607773, -0.05598710849881172, -0.08028417825698853, -0.06954896450042725, 0.06706754118204117, 0.09197015315294266, 0.04379836469888687, -0.07339182496070862, -0.055866092443466187, 0.007804263383150101, 0.05146543309092522, -0.06224621832370758, 0.11330881714820862, 0.06836850941181183, -0.07945571094751358, 0.11123429983854294, -0.058061905205249786, 0.11241572350263596, 0.08575178682804108, 0.03616155683994293, -0.10745497792959213, 0.0066973064094781876, 0.06439423561096191, 0.05028631165623665, 0.07568201422691345, -0.05762545391917229, 0.03860511630773544, 0.08772283047437668, -0.018881671130657196, 0.03689423203468323, -0.06434359401464462, 0.03020826168358326, 
0.03382481634616852, 0.005086190532892942, 0.0037067413795739412, 0.009610109031200409, 0.01914217881858349, 0.08665583282709122, 0.022757040336728096, 0.10636308044195175, -0.03150632977485657, -0.05090929567813873, -0.10107012093067169, 0.14335492253303528, -0.080254465341568, -0.2814396619796753, -0.17073561251163483, -0.05488942563533783, -0.030129030346870422, -0.007644978817552328, 0.060653068125247955, -0.013402780517935753, -0.10841479152441025, -0.10814820230007172, 0.06817629933357239, 0.04778415337204933, -0.14037589728832245, -0.05296947807073593, 0.05794345214962959, -0.0064393519423902035, -0.1721208393573761, 0.044921938329935074, 0.049377549439668655, -0.05332217365503311, -0.0006252587772905827, 0.06832987070083618, 0.11026465147733688, 0.09034159034490585, 0.08256831765174866, -0.032260648906230927, -0.004172224085777998, 0.17473353445529938, -0.10786493122577667, 0.024980003014206886, 0.10580943524837494, -0.05207306519150734, 0.058303721249103546, 0.1596410870552063, 0.012921705842018127, -0.09033816307783127, 0.053535155951976776, 0.09127584844827652, -0.06680003553628922, -0.2468772679567337, -0.12305619567632675, -0.03358691930770874, 0.017007693648338318, 0.11805609613656998, 0.06413912773132324, 0.03076029010117054, 0.018675576895475388, -0.12852244079113007, -0.004495483357459307, -0.04391945153474808, 0.0730612501502037, 0.05442725494503975, -0.0084695341065526, 0.041453514248132706, -0.04411114752292633, 0.01406122650951147, 0.1192217543721199, 0.05107097327709198, 0.13170872628688812, -0.046652041375637054, 0.1943201869726181, 0.09221304953098297, 0.06600033491849899, -0.038678087294101715, 0.04147876426577568, -0.016952810809016228, 0.05909203737974167, -0.01635214500129223, -0.10327596217393875, -0.06337112933397293, 0.09914030879735947, 0.036611054092645645, -0.06821348518133163, 0.04836731776595116, -0.10522303730249405, 0.0325370691716671, 0.17489205300807953, -0.025479279458522797, -0.12073404341936111, -0.06315696239471436, 
0.06581588834524155, -0.037227511405944824, -0.10114549845457077, -0.01582452654838562, 0.08355657011270523, -0.1408177614212036, 0.00266563193872571, -0.032744139432907104, 0.081179678440094, -0.1212572455406189, -0.0313551239669323, -0.04310017079114914, 0.0384063720703125, -0.004145988263189793, 0.11663083732128143, -0.1287216991186142, 0.07845684885978699, -0.0014289619866758585, 0.017152471467852592, -0.1152341365814209, 0.056525714695453644, -0.0359724685549736, -0.06709840148687363, 0.129019096493721, -0.010658753104507923, -0.05569501966238022, -0.05184965580701828, -0.10782666504383087, -0.01245699543505907, 0.04852999374270439, -0.10327021032571793, 0.10446614027023315, 0.032497335225343704, -0.02526993677020073, -0.034284915775060654, -0.027074484154582024, -0.11939486116170883, -0.24767206609249115, 0.10704819113016129, -0.12850932776927948, 0.03405705466866493, -0.06573151051998138, -0.043795984238386154, -0.02520848996937275, 0.1337013989686966, -0.08292679488658905, -0.05214480310678482, -0.10457833856344223, 0.005660085938870907, 0.18884553015232086, -0.04535640776157379, 0.054581169039011, -0.04710540175437927, 0.18698108196258545, -0.029928140342235565, -0.044013671576976776, -0.012913856655359268, -0.09474664181470871, -0.19000856578350067, -0.05579287186264992, 0.1106531098484993, 0.08113286644220352, 0.023148247972130775, -0.010486224666237831, 0.011486424133181572, 0.01715736836194992, -0.09415049850940704, 0.01733352616429329, 0.10376089066267014, 0.12417589128017426, 0.06176426634192467, -0.033447638154029846, -0.12563711404800415, -0.10395359992980957, -0.10574222356081009, 0.04356975480914116, 0.180097758769989, -0.05883433669805527, 0.17340725660324097, 0.15744514763355255, -0.08999599516391754, -0.18933993577957153, -0.06538083404302597, 0.024421237409114838, -0.02177104726433754, 0.1208953857421875, -0.19943386316299438, 0.06274314224720001, 0.06912224739789963, -0.028312839567661285, 0.13024115562438965, -0.2662625312805176, 
-0.13866937160491943, 0.045826323330402374, 0.04872845485806465, -0.2287312150001526, -0.1757054328918457, -0.10210095345973969, -0.028913771733641624, -0.1813708394765854, 0.13683095574378967, 0.02875656448304653, 0.029119886457920074, -0.02875557728111744, 0.08131327480077744, 0.05386142432689667, -0.07077111303806305, 0.12899793684482574, -0.01415047887712717, 0.022223232313990593, -0.10233841836452484, -0.038839761167764664, 0.006495949812233448, -0.03592514246702194, 0.07595042884349823, 0.018683860078454018, 0.04968881979584694, -0.08912504464387894, -0.038622304797172546, -0.07492544502019882, 0.0386294424533844, -0.0769888162612915, -0.05483333393931389, -0.08049046993255615, 0.08606812357902527, 0.08141069859266281, -0.008565596304833889, 0.03275963291525841, -0.05061613395810127, 0.05324649065732956, 0.2087816447019577, 0.09160812944173813, 0.04971824958920479, -0.08891782909631729, -0.03755311667919159, -0.013774807564914227, -0.004540125373750925, -0.11151787638664246, 0.044800106436014175, 0.07208102196455002, 0.048409800976514816, 0.10074775665998459, -0.026403745636343956, -0.18232759833335876, -0.006256472319364548, 0.07865796983242035, -0.08247145265340805, -0.1818620264530182, 0.048628102988004684, 0.15950027108192444, -0.15234169363975525, -0.08153683692216873, 0.07304563373327255, 0.01816517673432827, -0.04552972689270973, -0.004129315726459026, 0.07969187945127487, 0.04964432120323181, 0.10535015910863876, 0.013605457730591297, 0.048565223813056946, -0.0783054456114769, 0.08200731873512268, 0.13903942704200745, -0.10315420478582382, 0.002549003576859832, 0.022281212732195854, -0.045804161578416824, -0.0695919394493103, 0.0028666870202869177, 0.017427172511816025, 0.02446138672530651, -0.026593612506985664, 0.025116322562098503, -0.027061883360147476, 0.06875951588153839, 0.13532119989395142, -0.0051195393316447735, 0.04801608994603157, 0.01462353952229023, 0.005577016156166792, -0.06580746173858643, 0.09869691729545593, 0.02350131794810295, 
0.04727216064929962, -0.030600935220718384, 0.01737459935247898, 0.019624929875135422, -0.03296380862593651, 0.02002684958279133, -0.04957859590649605, -0.0768909677863121, 0.0017599493730813265, -0.17058494687080383, 0.06343087553977966, -0.08819741755723953, 0.011001138016581535, 0.00017645358457230031, -0.02232232131063938, -0.0033998251892626286, 0.0024193532299250364, -0.08342279493808746, -0.043806083500385284, -0.04648522660136223, 0.138979971408844, -0.19147755205631256, 0.0016865493962541223, 0.09330618381500244, -0.07296109944581985, 0.06565535813570023, -0.00124620262067765, -0.018755748867988586, 0.027597252279520035, -0.12604424357414246, 0.00015437649562954903, -0.02177337184548378, 0.06620946526527405, 0.010824860073626041, -0.13186143338680267, -0.0036916283424943686, 0.0010859676403924823, -0.0810837373137474, -0.00856830459088087, 0.03180079907178879, -0.15212905406951904, 0.061116885393857956, 0.0752846822142601, -0.04944855347275734, -0.04493309557437897, 0.05399825796484947, 0.047200702130794525, -0.009812084026634693, 0.09568867832422256, -0.006626299116760492, 0.024261489510536194, -0.1543731838464737, -0.047712814062833786, 0.000386591418646276, 0.013907978311181068, 0.04463733360171318, 0.013420102186501026, 0.016612064093351364, 0.019841820001602173, 0.24487370252609253, -0.015836050733923912, 0.02699095755815506, 0.015451586805284023, -0.0019417996518313885, -0.023821355774998665, 0.022430019453167915, 0.012980186380445957, -0.005263366736471653, 0.025499077513813972, 0.01246542576700449, -0.03721493110060692, -0.05845274776220322, -0.03666628524661064, 0.0576842725276947, 0.13883714377880096, 0.15377214550971985, -0.039476536214351654, 0.06893876940011978, -0.16715224087238312, -0.03760553151369095, 0.02082427591085434, -0.04900113120675087, 0.04170088842511177, -0.07383889704942703, 0.07416167855262756, 0.07509168982505798, -0.09784198552370071, 0.15461650490760803, -0.05582049489021301, -0.020524779334664345, -0.025419410318136215, 
-0.15973128378391266, -0.032376233488321304, 0.029883500188589096, 0.007532649673521519, -0.08997122943401337, 0.11823048442602158, 0.12435542047023773, -0.011006000451743603, 0.0007778042927384377, 0.07913839817047119, -0.06094703823328018, -0.056454066187143326, -0.03860648721456528, 0.0037657420616596937, 0.009644816629588604, -0.0004570460878312588, 0.08042213320732117, 0.02436927706003189, 0.060002248734235764, 0.07898741960525513, 0.10141240805387497, 0.03202006593346596, 0.006685222499072552, -0.040157806128263474, -0.05433318763971329, 0.004570500459522009, -0.0227236058562994, -0.05304987356066704, 0.22672607004642487, 0.04976750537753105, 0.020569225773215294, 0.013970119878649712, 0.2133840173482895, -0.0026025152765214443, -0.06882403790950775, -0.13142158091068268, 0.12937898933887482, -0.005662582349032164, 0.019205426797270775, 0.02984924614429474, -0.11311642080545425, 0.022672101855278015, 0.15141858160495758, 0.09558255225419998, 0.04704144969582558, 0.009608719497919083, 0.04965369403362274, 0.023081902414560318, -0.030888734385371208, 0.055039457976818085, 0.026172030717134476, 0.24960322678089142, -0.05750007554888725, 0.08592448383569717, -0.0011215814156457782, -0.0008876505889929831, -0.011236774735152721, 0.10240515321493149, -0.05442940071225166, 0.01163904182612896, -0.07177276909351349, 0.08407451957464218, -0.06587450951337814, -0.2653322219848633, 0.007941198535263538, -0.07946880906820297, -0.13935486972332, -0.022622937336564064, 0.02019287645816803, -0.021856704726815224, 0.04376092925667763, 0.034590497612953186, -0.026501068845391273, 0.17505493760108948, -0.0016100299544632435, -0.07480105757713318, -0.07449783384799957, 0.06592589616775513, -0.01983756385743618, 0.28993189334869385, -0.003213997231796384, 0.06744667887687683, 0.08817730844020844, -0.02149232290685177, -0.1322835236787796, 0.022365577518939972, 0.08352003246545792, -0.05184987187385559, 0.055450551211833954, 0.17224273085594177, -0.02564769797027111, 
0.13886477053165436, 0.03490786999464035, -0.030509766191244125, 0.0742519199848175, 0.0775555744767189, 0.041645243763923645, -0.10149494558572769, 0.07208167016506195, -0.09159891307353973, 0.1314535290002823, 0.10825494676828384, -0.00926055945456028, -0.012233900837600231, -0.060138147324323654, 0.07306106388568878, -0.02889038249850273, 0.1534673124551773, -0.022077981382608414, -0.15433651208877563, 0.0454033799469471, 0.01538236252963543, 0.06253921240568161, -0.25348785519599915, -0.051760535687208176, 0.10609118640422821, -0.05345967039465904, 0.010631939396262169, 0.08972733467817307, 0.04191708192229271, 0.03585842251777649, -0.05391652509570122, -0.15381206572055817, 0.01803184673190117, 0.12244853377342224, -0.07766281068325043, -0.03265054151415825 ]
620ad80e5206bdd69dbcda57bdca27ad4d5fe85e
# Chat凉宫春日的对话抽取模型 我们希望有一个模型能够从小说的chunk中批量去提取摘要和对话 这个模型就是实现了这一点。模型使用了大约30k的中文小说数据和20k的英文小说数据进行训练,在qwen-1.8上进行了3个epoch的finetune。 原则上模型同时支持中文和英文小说的训练 主项目链接 https://github.com/LC1332/Chat-Haruhi-Suzumiya - [李鲁鲁](https://github.com/LC1332)完成了数据的收集,以及进一步将inference程序扩展到连续的chunks - [刘崇寒](https://github.com/khazic)完成了模型的训练 - [米唯实](https://github.com/hhhwmws0117)测试并上传模型到hugging face # Chat Haruhi Suzumiya's Dialogue Extraction Model We hope to have a model that can extract summaries and dialogues in batches from chunks of novels. This model achieves just that. It was trained using approximately 30k Chinese novels and 20k English novels, and was fine-tuned on qwen-1.8 for three epochs. In principle, the model supports training for both Chinese and English novels. Main project link: https://github.com/LC1332/Chat-Haruhi-Suzumiya # Inference Code https://github.com/LC1332/Chat-Haruhi-Suzumiya/blob/main/notebook/Dialogue_Speaker_Extract_Test.ipynb ```python from transformers import AutoTokenizer, AutoModel, AutoModelForCausalLM tokenizer = AutoTokenizer.from_pretrained("silk-road/Haruhi-Dialogue-Speaker-Extract_qwen18", trust_remote_code=True) model = AutoModelForCausalLM.from_pretrained("silk-road/Haruhi-Dialogue-Speaker-Extract_qwen18", device_map="auto", trust_remote_code=True) sys_prompt = "给定input paragraph,抽取其中的对话,并输出为json格式 Let's think it step by step 1. summarize input paragraph into bullet format,存储在summary字段 2. 抽取每一句对话的内容 dialogue,判断每一句话的说话人 said by, 存储在conversations中" text = "Your novel text" response_str, history = model.chat(tokenizer, text, history=[], system=sys_prompt) ``` # Official Prompt Chinese: ``` 给定input paragraph,抽取其中的对话,并输出为json格式 Let's think it step by step 1. summarize input paragraph into bullet format,存储在summary字段 2. 抽取每一句对话的内容 dialogue,判断每一句话的说话人 said by, 存储在conversations中 ``` English: ``` Given an input paragraph, extract the dialogues within it, and output them in JSON format. 
Let's think about it step by step: - Summarize the input paragraph into bullet points and store it in the 'summary' field. - Extract the content of each dialogue ('dialogue'), identify the speaker for each sentence ('said by'), and store these in 'conversations'. ``` # 数据 数据是由openai生成的 但是根据结果我尝试从原文本recall行 对于原文本中如果一行有偶数个引号 且引号中的文本没有被抽取,则会删除这一行原文本 所以做了一定的清洁。
silk-road/Haruhi-Dialogue-Speaker-Extract
[ "size_categories:10K<n<100K", "language:zh", "language:en", "license:cc-by-4.0", "region:us" ]
2024-01-25T02:13:01+00:00
{"language": ["zh", "en"], "license": "cc-by-4.0", "size_categories": ["10K<n<100K"], "pretty_name": "t"}
2024-01-26T02:41:03+00:00
[]
[ "zh", "en" ]
TAGS #size_categories-10K<n<100K #language-Chinese #language-English #license-cc-by-4.0 #region-us
# Chat凉宫春日的对话抽取模型 我们希望有一个模型能够从小说的chunk中批量去提取摘要和对话 这个模型就是实现了这一点。模型使用了大约30k的中文小说数据和20k的英文小说数据进行训练,在qwen-1.8上进行了3个epoch的finetune。 原则上模型同时支持中文和英文小说的训练 主项目链接 URL - 李鲁鲁完成了数据的收集,以及进一步将inference程序扩展到连续的chunks - 刘崇寒完成了模型的训练 - 米唯实测试并上传模型到hugging face # Chat Haruhi Suzumiya's Dialogue Extraction Model We hope to have a model that can extract summaries and dialogues in batches from chunks of novels. This model achieves just that. It was trained using approximately 30k Chinese novels and 20k English novels, and was fine-tuned on qwen-1.8 for three epochs. In principle, the model supports training for both Chinese and English novels. Main project link: URL # Inference Code URL # Official Prompt Chinese: English: # 数据 数据是由openai生成的 但是根据结果我尝试从原文本recall行 对于原文本中如果一行有偶数个引号 且引号中的文本没有被抽取,则会删除这一行原文本 所以做了一定的清洁。
[ "# Chat凉宫春日的对话抽取模型\n\n我们希望有一个模型能够从小说的chunk中批量去提取摘要和对话\n\n这个模型就是实现了这一点。模型使用了大约30k的中文小说数据和20k的英文小说数据进行训练,在qwen-1.8上进行了3个epoch的finetune。 原则上模型同时支持中文和英文小说的训练\n\n主项目链接 URL\n\n- 李鲁鲁完成了数据的收集,以及进一步将inference程序扩展到连续的chunks\n- 刘崇寒完成了模型的训练\n- 米唯实测试并上传模型到hugging face", "# Chat Haruhi Suzumiya's Dialogue Extraction Model\n\nWe hope to have a model that can extract summaries and dialogues in batches from chunks of novels.\n\nThis model achieves just that. It was trained using approximately 30k Chinese novels and 20k English novels, and was fine-tuned on qwen-1.8 for three epochs. In principle, the model supports training for both Chinese and English novels.\n\nMain project link: URL", "# Inference Code\n\nURL", "# Official Prompt\n\nChinese:\n\n\n\nEnglish:", "# 数据\n\n数据是由openai生成的\n\n但是根据结果我尝试从原文本recall行\n\n对于原文本中如果一行有偶数个引号 且引号中的文本没有被抽取,则会删除这一行原文本\n\n所以做了一定的清洁。" ]
[ "TAGS\n#size_categories-10K<n<100K #language-Chinese #language-English #license-cc-by-4.0 #region-us \n", "# Chat凉宫春日的对话抽取模型\n\n我们希望有一个模型能够从小说的chunk中批量去提取摘要和对话\n\n这个模型就是实现了这一点。模型使用了大约30k的中文小说数据和20k的英文小说数据进行训练,在qwen-1.8上进行了3个epoch的finetune。 原则上模型同时支持中文和英文小说的训练\n\n主项目链接 URL\n\n- 李鲁鲁完成了数据的收集,以及进一步将inference程序扩展到连续的chunks\n- 刘崇寒完成了模型的训练\n- 米唯实测试并上传模型到hugging face", "# Chat Haruhi Suzumiya's Dialogue Extraction Model\n\nWe hope to have a model that can extract summaries and dialogues in batches from chunks of novels.\n\nThis model achieves just that. It was trained using approximately 30k Chinese novels and 20k English novels, and was fine-tuned on qwen-1.8 for three epochs. In principle, the model supports training for both Chinese and English novels.\n\nMain project link: URL", "# Inference Code\n\nURL", "# Official Prompt\n\nChinese:\n\n\n\nEnglish:", "# 数据\n\n数据是由openai生成的\n\n但是根据结果我尝试从原文本recall行\n\n对于原文本中如果一行有偶数个引号 且引号中的文本没有被抽取,则会删除这一行原文本\n\n所以做了一定的清洁。" ]
[ 36, 132, 104, 6, 9, 56 ]
[ "passage: TAGS\n#size_categories-10K<n<100K #language-Chinese #language-English #license-cc-by-4.0 #region-us \n# Chat凉宫春日的对话抽取模型\n\n我们希望有一个模型能够从小说的chunk中批量去提取摘要和对话\n\n这个模型就是实现了这一点。模型使用了大约30k的中文小说数据和20k的英文小说数据进行训练,在qwen-1.8上进行了3个epoch的finetune。 原则上模型同时支持中文和英文小说的训练\n\n主项目链接 URL\n\n- 李鲁鲁完成了数据的收集,以及进一步将inference程序扩展到连续的chunks\n- 刘崇寒完成了模型的训练\n- 米唯实测试并上传模型到hugging face# Chat Haruhi Suzumiya's Dialogue Extraction Model\n\nWe hope to have a model that can extract summaries and dialogues in batches from chunks of novels.\n\nThis model achieves just that. It was trained using approximately 30k Chinese novels and 20k English novels, and was fine-tuned on qwen-1.8 for three epochs. In principle, the model supports training for both Chinese and English novels.\n\nMain project link: URL# Inference Code\n\nURL# Official Prompt\n\nChinese:\n\n\n\nEnglish:# 数据\n\n数据是由openai生成的\n\n但是根据结果我尝试从原文本recall行\n\n对于原文本中如果一行有偶数个引号 且引号中的文本没有被抽取,则会删除这一行原文本\n\n所以做了一定的清洁。" ]
[ -0.0031494805589318275, 0.11773470789194107, -0.009107028134167194, 0.05320890247821808, 0.03690725192427635, 0.008499594405293465, 0.17974576354026794, 0.07866181433200836, 0.040384721010923386, 0.022983074188232422, 0.022093020379543304, -0.060350533574819565, 0.06450563669204712, 0.08794131129980087, 0.028313135728240013, -0.26491886377334595, 0.03802020475268364, 0.04647180810570717, 0.0468657985329628, 0.07458984106779099, 0.07406869530677795, -0.041196029633283615, 0.07004082202911377, -0.010299225337803364, -0.036145128309726715, -0.03671231493353844, -0.05872632563114166, -0.04420437291264534, 0.06843935698270798, 0.005398265086114407, 0.12435880303382874, -0.009090227074921131, 0.06523841619491577, -0.07100632786750793, 0.005390099249780178, -0.009142826311290264, -0.033394865691661835, 0.006935758516192436, -0.0245224516838789, 0.10154629498720169, 0.23085308074951172, -0.05492750555276871, 0.009763482958078384, 0.020204415544867516, -0.05596296489238739, 0.02622068114578724, -0.003993664402514696, 0.11456088721752167, 0.1057654619216919, 0.11031217128038406, -0.016222793608903885, 0.07086857408285141, -0.09177298843860626, 0.06930917501449585, -0.020329540595412254, -0.23617243766784668, -0.04985522851347923, 0.18828757107257843, 0.03166737034916878, 0.08465985953807831, -0.0762098878622055, -0.0029047373682260513, 0.07570014894008636, -0.017232775688171387, -0.084536112844944, -0.064817875623703, 0.021440261974930763, -0.020689334720373154, -0.09794361889362335, 0.03930441290140152, 0.210862398147583, 0.03276088461279869, -0.05221393704414368, -0.08763142675161362, -0.021710136905312538, -0.011346862651407719, -0.045811254531145096, -0.013490728102624416, 0.02436518855392933, 0.025912418961524963, 0.07776511460542679, -0.0382523238658905, -0.04076385125517845, -0.09824709594249725, -0.038503363728523254, 0.03923709690570831, 0.045737165957689285, 0.021562060341238976, -0.06750848889350891, 0.01810998097062111, -0.042276229709386826, 
-0.07332339137792587, -0.020040597766637802, -0.08399781584739685, -0.0019288616022095084, 0.027790438383817673, 0.02052774466574192, -0.025270145386457443, 0.04713550955057144, -0.012429594993591309, 0.012247640639543533, 0.07364754378795624, 0.02658177725970745, 0.04836439713835716, 0.0781218484044075, 0.04646160453557968, -0.022724054753780365, -0.025534434244036674, 0.0010131463641300797, 0.03210873156785965, 0.0703393965959549, -0.0037996892351657152, -0.08374326676130295, -0.024612564593553543, -0.07428188621997833, -0.009318046271800995, -0.05043834447860718, 0.04817773401737213, -0.0013496174942702055, -0.003383226692676544, 0.04502956196665764, -0.04806425794959068, -0.04664098843932152, -0.005487991496920586, -0.09485375881195068, 0.09314477443695068, -0.01958487369120121, 0.007958276197314262, -0.026299279183149338, -0.008711842820048332, -0.057260897010564804, -0.014969553798437119, -0.012948863208293915, -0.045758891850709915, 0.021624991670250893, 0.00682092597708106, 0.0201779343187809, -0.06172547489404678, -0.13545185327529907, -0.0666351243853569, 0.06259822100400925, -0.037594184279441833, -0.03517146781086922, -0.06129590794444084, -0.05185383930802345, -0.023863591253757477, 0.03558897227048874, -0.05639667063951492, -0.018239380791783333, -0.002209775149822235, 0.0203680619597435, 0.09211796522140503, -0.07340843975543976, 0.0046100192703306675, -0.07239735871553421, 0.026256946846842766, -0.09713164716959, 0.09424353390932083, -0.009774544276297092, 0.039128560572862625, -0.014823079109191895, -0.004273813217878342, -0.03298409655690193, 0.046115122735500336, 0.021078787744045258, 0.1332125961780548, -0.14838211238384247, -0.009205893613398075, 0.17237375676631927, -0.10908708721399307, -0.0951896458864212, 0.1139170303940773, -0.021426785737276077, 0.12937942147254944, 0.07288674265146255, 0.176855206489563, 0.010848509147763252, -0.047373995184898376, -0.042561836540699005, 0.0721675455570221, -0.002176278969272971, 0.04092683643102646, 
0.08216767758131027, 0.06395286321640015, 0.0035579726099967957, 0.026873888447880745, -0.035626351833343506, -0.06473957002162933, -0.034654904156923294, -0.0801500454545021, -0.008880753070116043, -0.041557684540748596, -0.05059807375073433, 0.005526253953576088, 0.09489283710718155, -0.01888345181941986, 0.01108147669583559, -0.11602293699979782, 0.11914264410734177, -0.020346712321043015, 0.019830243661999702, -0.050957731902599335, -0.024054650217294693, 0.1251915991306305, 0.04298798739910126, -0.04511930048465729, 0.05501618608832359, 0.031602684408426285, 0.03362921625375748, 0.050465911626815796, 0.0895242691040039, -0.026561720296740532, 0.026540352031588554, -0.031926076859235764, 0.015350568108260632, -0.015291084535419941, -0.019850222393870354, -0.05726262927055359, -0.11112692207098007, 0.03225880488753319, -0.03417067229747772, 0.05434070900082588, -0.1750854104757309, 0.021895013749599457, -0.09584077447652817, 0.027960598468780518, -0.053276434540748596, 0.028230320662260056, 0.05343875288963318, 0.035205889493227005, -0.06071561947464943, -0.002864688402041793, 0.026990357786417007, 0.028524480760097504, -0.13320529460906982, 0.07432568073272705, 0.018983785063028336, -0.018899695947766304, 0.11400081217288971, -0.02341269701719284, -0.07208713889122009, -0.03656923770904541, -0.014948971569538116, -0.004396736156195402, -0.00987596157938242, 0.03243345767259598, 0.127078577876091, -0.02305642142891884, 0.08900870382785797, -0.07651914656162262, 0.07912954688072205, -0.01060266699641943, -0.06174703687429428, 0.04303793981671333, 0.10261625051498413, 0.053630802780389786, -0.17739929258823395, 0.031061386689543724, -0.002608648268505931, -0.01611754484474659, 0.14434966444969177, 0.02053721994161606, -0.025808896869421005, -0.05660964176058769, 0.055630918592214584, -0.027857467532157898, 0.09541764110326767, -0.1392737776041031, -0.05337659642100334, -0.0052513726986944675, 0.01695447415113449, 0.025260768830776215, -0.09003912657499313, 
-0.06987445056438446, -0.023184435442090034, -0.037099458277225494, -0.09880819171667099, 0.0025507875252515078, -0.11157906800508499, 0.07663919031620026, -0.01562710478901863, -0.014526727609336376, -0.018507827073335648, -0.04460025206208229, -0.08391518145799637, 0.08859120309352875, -0.016843976452946663, -0.13488106429576874, 0.011989296413958073, 0.003390242112800479, 0.0026667665224522352, 0.02968389168381691, 0.0011702891206368804, -0.1336652934551239, -0.046319104731082916, -0.06185544282197952, -0.029712053015828133, -0.09456396102905273, -0.04329628497362137, 0.04901266098022461, 0.09737978875637054, -0.07522166520357132, -0.06557730585336685, 0.006531874183565378, -0.04565403237938881, -0.04081188514828682, 0.02743179351091385, -0.17578592896461487, 0.0294874869287014, 0.102964848279953, 0.042831793427467346, -0.021213069558143616, -0.003738302970305085, 0.11479514837265015, -0.0762953981757164, 0.03413115069270134, 0.04555485025048256, 0.030691320076584816, 0.015376090072095394, 0.11955228447914124, 0.024660782888531685, -0.05796957388520241, 0.06694083660840988, -0.032422855496406555, -0.06251082569360733, -0.18159425258636475, -0.037618476897478104, -0.06238211691379547, 0.07437283545732498, -0.0061064548790454865, 0.035186465829610825, 0.022725729271769524, 0.06862282752990723, -0.004372454714030027, 0.12898407876491547, 0.03782281279563904, 0.03879820182919502, 0.03456651791930199, -0.011013020761311054, 0.030177868902683258, -0.08416008949279785, 0.02039233222603798, 0.04237140342593193, -0.024846151471138, 0.10931336134672165, 0.019759412854909897, 0.08317355066537857, 0.046871013939380646, 0.12245312333106995, 0.07940105348825455, 0.03169335797429085, 0.017679374665021896, -0.021065060049295425, -0.07031698524951935, -0.020843030884861946, -0.037803687155246735, 0.10480710119009018, -0.011113469488918781, -0.06316018104553223, -0.030767181888222694, 0.14703693985939026, 0.04987720027565956, 0.038813911378383636, 0.09073047339916229, 
-0.061990562826395035, -0.04830294847488403, 0.024195920675992966, 0.00169552571605891, 0.0014215277042239904, 0.10571878403425217, 0.11275209486484528, -0.1277085840702057, 0.02673465944826603, 0.06254774332046509, 0.11744710057973862, -0.05601375550031662, 0.015334016643464565, -0.09758732467889786, -0.060607120394706726, -0.0034176059998571873, 0.043677449226379395, -0.23997096717357635, 0.22553566098213196, 0.0044507416896522045, 0.07036065310239792, -0.04262438789010048, -0.04192532226443291, 0.06959228217601776, 0.04882843792438507, 0.1468324512243271, 0.028178129345178604, -0.07806119322776794, -0.006448885425925255, -0.11562883853912354, 0.05644586682319641, 0.054604850709438324, 0.024711821228265762, 0.03681012988090515, 0.018248209729790688, -0.0036832557525485754, -0.02195536345243454, 0.07028269022703171, -0.10219617933034897, -0.08232171833515167, -0.032502125948667526, 0.07764995098114014, -0.029173672199249268, 0.021609414368867874, -0.017080528661608696, 0.042428769171237946, 0.08682015538215637, 0.017710264772176743, -0.07507291436195374, -0.001973871374502778, 0.03278931975364685, 0.06903275102376938, -0.02914300560951233, -0.0001664766314206645, -0.009120400063693523, -0.004140667151659727, -0.006059292238205671, 0.0055829002521932125, 0.008491581305861473, -0.06811012327671051, -0.07882272452116013, -0.043416980654001236, 0.05525650456547737, 0.04298265650868416, 0.08330481499433517, 0.0076879216358065605, -0.030470184981822968, 0.004507619887590408, -0.13133837282657623, -0.027366671711206436, 0.002985951490700245, 0.004245306830853224, 0.08366371691226959, -0.12032733857631683, -0.03856296092271805, -0.09347910434007645, -0.09920841455459595, 0.05214719846844673, 0.09675301611423492, -0.00154784694314003, 0.03635907173156738, 0.17130228877067566, -0.05675630643963814, -0.14260804653167725, -0.06353666633367538, 0.005425805691629648, 0.04454519972205162, -0.06864246726036072, -0.18313442170619965, 0.04606226086616516, -0.000462399679236114, 
0.017183557152748108, -0.028730086982250214, -0.16325706243515015, -0.0820053294301033, 0.022071504965424538, -0.027011476457118988, 0.09265827387571335, -0.09052012115716934, -0.06693970412015915, 0.004599348176270723, -0.038195397704839706, 0.040618106722831726, -0.07368473708629608, 0.08117745071649551, 0.000992852495983243, -0.0018404899165034294, 0.007695878855884075, -0.009407460689544678, 0.10383879393339157, 0.028914377093315125, 0.02016996592283249, -0.07695272564888, -0.11461351066827774, 0.06936463713645935, -0.01746118627488613, 0.14432457089424133, -0.025820394977927208, 0.05830127000808716, -0.07924904674291611, -0.040804971009492874, -0.022471586242318153, -0.06761223077774048, -0.04652371257543564, -0.03974711149930954, -0.10631882399320602, 0.11078553646802902, 0.040235813707113266, 0.024947503581643105, 0.11820019036531448, 0.02780979685485363, -0.020381977781653404, 0.030377238988876343, 0.055545832961797714, 0.05648873373866081, 0.08357328921556473, -0.014855784364044666, -0.00964248739182949, 0.0649680644273758, -0.19018220901489258, -0.05570245161652565, 0.0945415198802948, 0.043590325862169266, 0.06013648211956024, 0.05387985706329346, -0.019626663997769356, 0.047176189720630646, 0.04450399428606033, -0.07168016582727432, -0.08726776391267776, -0.04077523946762085, -0.09079936891794205, 0.025891141965985298, 0.00951629038900137, 0.02215128019452095, -0.09506067633628845, 0.01619037427008152, -0.008947927504777908, 0.0167311392724514, -0.020311379805207253, 0.04863116145133972, 0.011623235419392586, 0.006039139814674854, -0.0464850477874279, 0.0606490820646286, 0.08921455591917038, -0.02160818502306938, 0.046726908534765244, 0.0793880745768547, -0.10043853521347046, -0.013596650213003159, -0.055862173438072205, 0.09181839972734451, 0.01999587006866932, -0.04543653875589371, -0.0662018358707428, -0.03394255414605141, -0.027537468820810318, 0.037536248564720154, 0.050461545586586, 0.047347430139780045, -0.01133098267018795, 
-0.017051002010703087, -0.05825193226337433, 0.012232315726578236, 0.005426423158496618, -0.025514476001262665, -0.08409934490919113, 0.07298829406499863, 0.05799972265958786, 0.04777972772717476, -0.037458062171936035, -0.026816166937351227, -0.11892577260732651, 0.039853546768426895, -0.08743734657764435, 0.08755648136138916, -0.13405194878578186, -0.03951568156480789, -0.04063274711370468, -0.03408251330256462, -0.09142038226127625, 0.011594188399612904, -0.021470779553055763, 0.002439554315060377, -0.012062213383615017, 0.07037309557199478, -0.007809260860085487, 0.008926136419177055, 0.03138678893446922, -0.05419225990772247, 0.011085161939263344, 0.07058575004339218, -0.030698129907250404, 0.028953667730093002, -0.03983907774090767, 0.07413184642791748, 0.0020844077225774527, 0.0428437814116478, -0.0011122869327664375, 0.027402613312005997, -0.052768271416425705, 0.027305861935019493, 0.03674614056944847, 0.023454364389181137, 0.03911567106842995, -0.01546983141452074, -0.030301358550786972, -0.06282103806734085, -0.046285782009363174, -0.05722440406680107, 0.034238118678331375, 0.11947561800479889, 0.055551640689373016, 0.0864129513502121, -0.029014762490987778, 0.08494419604539871, -0.09896568953990936, 0.014124986715614796, -0.01049601286649704, 0.028786201030015945, -0.037731826305389404, -0.06586608290672302, 0.026363946497440338, -0.04139593616127968, 0.22728197276592255, -0.02388845384120941, -0.047300465404987335, 0.005563956685364246, -0.0016053809085860848, -0.02889079600572586, -0.009714791551232338, 0.1833145022392273, 0.09307962656021118, -0.020513691008090973, -0.024215035140514374, -0.008067525923252106, -0.02561832033097744, 0.008865266107022762, 0.05384301021695137, 0.03894449397921562, 0.09428329765796661, 0.10852095484733582, -0.005439813248813152, 0.007024120539426804, 0.02241789922118187, 0.01798858493566513, -0.04899650067090988, -0.014061974361538887, -0.0060827238485217094, 0.025555262342095375, 0.09703080356121063, 
-0.07523185759782791, 0.07185754925012589, -0.048643991351127625, -0.10470760613679886, -0.07780575752258301, -0.08345476537942886, -0.07376313954591751, -0.08571691066026688, 0.017239190638065338, -0.09078649431467056, 0.02914354018867016, 0.0058061834424734116, 0.04265238717198372, -0.049174338579177856, 0.07904944568872452, 0.017943356186151505, -0.05210768058896065, 0.1100531816482544, -0.02160748466849327, -0.007227666676044464, -0.05416681990027428, -0.0032666355837136507, -0.038263190537691116, -0.027359794825315475, -0.03241172060370445, 0.05748189240694046, -0.0040314458310604095, 0.006253472063690424, 0.013231249526143074, -0.01982833258807659, -0.022072186693549156, -0.014335890300571918, 0.10050167143344879, 0.06511721760034561, 0.04201686754822731, -0.07531262189149857, 0.007589960005134344, 0.17035140097141266, 0.00294225150719285, -0.0440324991941452, -0.1429402083158493, 0.12172956019639969, -0.059692081063985825, -0.05229397118091583, -0.0014037734363228083, -0.028374768793582916, -0.013955578207969666, 0.20608511567115784, 0.1737169325351715, -0.10198619216680527, -0.033052556216716766, -0.016342343762516975, 0.003064313903450966, -0.013722908683121204, 0.041634999215602875, 0.07316647469997406, 0.26038679480552673, -0.04955259710550308, -0.0313415490090847, -0.09662070870399475, -0.003371977945789695, -0.019981103017926216, 0.031609151512384415, 0.07462488114833832, -0.03786647319793701, -0.08236218243837357, 0.11793457716703415, -0.16913804411888123, -0.09741520881652832, -0.004401496145874262, -0.1047757938504219, -0.05042504146695137, 0.020618809387087822, -0.05458178371191025, 0.11820360273122787, 0.06627482175827026, -0.017670875415205956, 0.06636280566453934, -0.018871361389756203, 0.006160196848213673, -0.059539664536714554, -0.04392033815383911, 0.12314709275960922, -0.01736554130911827, 0.14087732136249542, 0.02206110954284668, 0.08966810256242752, 0.03146553412079811, 0.0010631639743223786, -0.001440912950783968, 0.09318385273218155, 
0.01224229484796524, 0.03776809573173523, -0.03665505722165108, 0.057297252118587494, -0.03137035667896271, 0.03499767556786537, 0.10793616622686386, 0.012717276811599731, 0.06894627213478088, 0.15404550731182098, -0.004673732444643974, -0.1545039713382721, 0.0744277760386467, -0.11566980183124542, 0.19870109856128693, 0.16801567375659943, -0.051507119089365005, 0.005088025238364935, 0.03698541596531868, 0.06474828720092773, -0.05163314938545227, -0.03657436743378639, -0.009943455457687378, -0.14296212792396545, 0.019730940461158752, -0.009746742434799671, 0.014520312659442425, -0.1634022444486618, -0.026667051017284393, 0.006288821808993816, -0.013814015313982964, -0.041722364723682404, -0.010176577605307102, 0.10231214016675949, 0.02337537333369255, -0.027123739942908287, -0.08130764216184616, 0.0019829156808555126, 0.041164085268974304, -0.06615256518125534, -0.15015920996665955 ]
70cf2e6b000d960e8611e10b5a83ac54adbd3b4f
# 🚀 Load Dataset ```python from datasets import load_dataset dataset = load_dataset("shuyuej/prompt_consistency_training_full_data") dataset = dataset["train"] print(dataset) ```
shuyuej/prompt_consistency_training_full_data
[ "license:apache-2.0", "region:us" ]
2024-01-25T02:18:35+00:00
{"license": "apache-2.0"}
2024-01-25T18:21:03+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
# Load Dataset
[ "# Load Dataset" ]
[ "TAGS\n#license-apache-2.0 #region-us \n", "# Load Dataset" ]
[ 14, 5 ]
[ "passage: TAGS\n#license-apache-2.0 #region-us \n# Load Dataset" ]
[ -0.07001654803752899, 0.1904653012752533, -0.004998877178877592, 0.00907934457063675, -0.0028538380283862352, 0.054394908249378204, 0.16303427517414093, 0.12618236243724823, 0.1763383150100708, -0.06096614897251129, 0.09710494428873062, 0.04643046483397484, 0.020282577723264694, 0.11439700424671173, 0.015474379062652588, -0.10245008021593094, 0.11215199530124664, -0.03296778351068497, -0.15413860976696014, -0.020041679963469505, 0.06634549796581268, 0.008282235823571682, 0.018562356010079384, -0.06968339532613754, -0.006373110227286816, 0.03640283644199371, 0.0020335251465439796, 0.016162084415555, 0.02330676279962063, -0.038277264684438705, 0.005731794983148575, 0.029110433533787727, 0.03391251340508461, -0.20263391733169556, 0.002159419935196638, 0.011697918176651001, -0.072933629155159, 0.021473249420523643, 0.015562492422759533, 0.013006992638111115, -0.014430290088057518, 0.008538242429494858, -0.04590527340769768, 0.004893551114946604, -0.029969552531838417, -0.23031283915042877, -0.13999857008457184, 0.03098377026617527, 0.061859868466854095, 0.04261074587702751, 0.08520697802305222, 0.112308070063591, -0.1684703677892685, -0.023576728999614716, 0.05644429847598076, -0.2658948600292206, 0.03770916908979416, 0.1925373077392578, -0.027282025665044785, 0.05708051100373268, -0.032640136778354645, 0.014179641380906105, 0.09702048450708389, -0.03234678879380226, -0.05952118709683418, -0.019281448796391487, -0.14307193458080292, 0.12061256915330887, 0.007947882637381554, -0.07937075942754745, 0.4276890456676483, 0.08947485685348511, 0.03446684032678604, 0.04088602960109711, -0.04162471741437912, 0.08802156895399094, -0.00882687047123909, 0.11228219419717789, 0.10370917618274689, 0.17537449300289154, 0.09373819082975388, -0.05837281793355942, -0.1387629508972168, -0.0764801874756813, -0.14893755316734314, -0.0881175547838211, 0.002753246808424592, 0.14657826721668243, -0.11670559644699097, -0.011121216230094433, -0.013173341751098633, -0.08000656962394714, 
-0.016438452526926994, -0.06224498152732849, 0.05970108509063721, 0.07276752591133118, -0.06468930095434189, 0.08973073959350586, 0.19600972533226013, 0.24304203689098358, 0.10773353278636932, -0.0031716858502477407, -0.09783545881509781, 0.1311764270067215, 0.02450001984834671, 0.0035947742871940136, 0.031216908246278763, -0.03225255012512207, 0.1274290233850479, -0.11814776808023453, 0.10293827950954437, -0.03236187621951103, -0.12062004208564758, -0.015531999059021473, -0.12012672424316406, 0.0924612358212471, 0.12156467139720917, -0.07433512806892395, -0.02871553599834442, -0.0001638674148125574, 0.1914612352848053, -0.07343824207782745, 0.011609512381255627, 0.037060488015413284, -0.04014584422111511, 0.019367489963769913, 0.07047323882579803, 0.01618986763060093, 0.051165465265512466, -0.07353158295154572, -0.07518532872200012, -0.04754794016480446, 0.006732400972396135, 0.08390267193317413, 0.16801878809928894, -0.0787782073020935, 0.02774466574192047, -0.07314363867044449, -0.2640254497528076, 0.015318970195949078, 0.10718841850757599, 0.01326839905232191, -0.04644060134887695, 0.0937519520521164, 0.04369577765464783, 0.0219112578779459, -0.07621479034423828, 0.021636027842760086, -0.10924455523490906, 0.01635110005736351, -0.18735843896865845, -0.00034857707214541733, -0.21728886663913727, 0.029500821605324745, -0.14498181641101837, 0.015984103083610535, 0.008336689323186874, -0.036203473806381226, -0.16693279147148132, 0.18584993481636047, -0.12228277325630188, 0.07665608078241348, -0.022888079285621643, -0.027417439967393875, -0.05536381155252457, 0.10486848652362823, -0.1901201605796814, 0.013646816834807396, 0.11956170201301575, -0.1315540224313736, -0.1981583535671234, -0.008748158812522888, 0.032004132866859436, 0.03905763477087021, 0.019999176263809204, 0.2750741243362427, 0.03931259736418724, -0.039619818329811096, 0.05513544753193855, 0.21230784058570862, -0.037666887044906616, -0.30606216192245483, 0.1337161511182785, -0.14175526797771454, 
-0.09446083754301071, 0.035942576825618744, -0.05984149128198624, 0.10572918504476547, 0.05563924461603165, -0.09842493385076523, -0.05832784250378609, -0.11846277117729187, -0.05080516263842583, -0.007119585759937763, 0.011414838954806328, -0.003772859927266836, 0.06987990438938141, -0.03394223004579544, 0.12409792840480804, 0.0430963933467865, 0.06995843350887299, -0.0019742429722100496, 0.010701647028326988, -0.032948900014162064, 0.01685371808707714, -0.05274543538689613, -0.11824917048215866, 0.03222540020942688, -0.07840131968259811, 0.010248200036585331, 0.0754818469285965, 0.08918923884630203, -0.10875682532787323, 0.004986066371202469, 0.054788678884506226, 0.05776919424533844, 0.09128472954034805, 0.004639789462089539, -0.1553730070590973, 0.03658977895975113, -0.007291417568922043, 0.11663542687892914, 0.013290156610310078, -0.015077928081154823, 0.02553686872124672, 0.030704280361533165, -0.01582513377070427, 0.021987546235322952, 0.012808484956622124, -0.15357114374637604, 0.013035740703344345, -0.029535915702581406, 0.0515626035630703, 0.07516250759363174, -0.10649967193603516, 0.17749257385730743, 0.05246195197105408, 0.0937701091170311, 0.1691731959581375, -0.0031942089553922415, 0.15303511917591095, -0.07711337506771088, -0.03034058026969433, -0.08476773649454117, 0.019114429131150246, -0.03520386293530464, -0.19912157952785492, 0.022483263164758682, -0.006603246089071035, -0.03966274857521057, 0.011731458827853203, -0.05206333473324776, -0.05032000690698624, -0.01633274368941784, -0.012912747450172901, 0.22682836651802063, -0.0740542858839035, 0.16127049922943115, 0.3929736912250519, 0.009790927171707153, 0.03198212757706642, -0.15585008263587952, -0.08811907470226288, 0.025037283077836037, -0.02013486996293068, -0.07733090966939926, 0.1421229988336563, -0.08407855778932571, 0.07925056666135788, 0.11559281498193741, 0.07515023648738861, 0.048295337706804276, -0.09319842606782913, -0.08565586805343628, -0.016870850697159767, -0.06494379788637161, 
-0.0757361501455307, 0.006179455202072859, -0.09064626693725586, 0.038997307419776917, -0.009553880430758, -0.08711880445480347, 0.1416091024875641, -0.06720657646656036, -0.07858653366565704, 0.08501026779413223, -0.1773792803287506, -0.017761990427970886, -0.04987482354044914, -0.05524556711316109, -0.05987956374883652, -0.011223694309592247, 0.03956194967031479, -0.07345953583717346, -0.06481890380382538, -0.0044582197442650795, -0.11862372606992722, 0.05133015662431717, -0.002835777821019292, -0.001831702538765967, 0.07515180855989456, 0.020108293741941452, -0.13118132948875427, -0.020894730463624, 0.014332571998238564, -0.011530141346156597, 0.020547814667224884, -0.12734408676624298, 0.08069302886724472, 0.11161760985851288, 0.07985270023345947, 0.040471069514751434, -0.002052333438768983, 0.08670412749052048, -0.0023793831933289766, 0.0037481726612895727, 0.16268162429332733, 0.012608258984982967, 0.018607959151268005, 0.11706527322530746, 0.05305314064025879, -0.048632312566041946, 0.0070783342234790325, 0.014489368535578251, -0.11434327811002731, -0.3088737428188324, -0.126278817653656, -0.0808558538556099, 0.07748565077781677, 0.08287206292152405, 0.14348120987415314, 0.05805513635277748, 0.07937482744455338, -0.020483603700995445, 0.006435450632125139, 0.013887143693864346, -0.036056581884622574, 0.031847286969423294, -0.035910844802856445, -0.01373085007071495, -0.17227716743946075, 0.07147528976202011, 0.19122646749019623, 0.1603180468082428, 0.19476060569286346, 0.19143876433372498, 0.13832567632198334, 0.09698405116796494, 0.18953464925289154, -0.040424197912216187, 0.12604868412017822, 0.058183349668979645, 0.027232512831687927, -0.01953265815973282, -0.0491039864718914, -0.0196559876203537, 0.10182230174541473, 0.03333480656147003, -0.174533411860466, 0.03443571925163269, -0.16347607970237732, 0.07128030061721802, 0.12810580432415009, 0.09226731956005096, -0.07216334342956543, 0.11737333983182907, 0.12907880544662476, 0.11842317879199982, 
0.0372268371284008, 0.13275204598903656, -0.0738038718700409, -0.04381672292947769, 0.11593903601169586, 0.027494866400957108, 0.10757297277450562, 0.006952364929020405, -0.0514223575592041, -0.05423459783196449, -0.1831110715866089, 0.07742337882518768, 0.1742519736289978, -0.11897630244493484, 0.1527315080165863, 0.007360770832747221, -0.07842399924993515, -0.10153786838054657, -0.035853754729032516, 0.07475770264863968, 0.13980795443058014, 0.10237900912761688, 0.09461832791566849, -0.16316671669483185, 0.1071644052863121, -0.18991471827030182, 0.03761889785528183, -0.06512397527694702, -0.02938513644039631, -0.13786184787750244, -0.03136984631419182, 0.018966924399137497, 0.04204443097114563, 0.14526686072349548, -0.08443856239318848, -0.0934140607714653, -0.03135392814874649, 0.16512109339237213, -0.07707548886537552, -0.09012707322835922, 0.04150933399796486, -0.01470975112169981, 0.13258203864097595, -0.0013804734917357564, -0.03641519695520401, -0.0624312199652195, -0.13648180663585663, 0.12659449875354767, -0.006370837800204754, -0.01564682088792324, -0.04408372566103935, -0.053515125066041946, -0.07499849051237106, -0.22749395668506622, 0.09926195442676544, -0.11815635859966278, 0.029708122834563255, -0.05135143920779228, 0.08055438846349716, -0.04865441098809242, 0.011574150063097477, 0.013341099955141544, 0.00196447572670877, -0.04447980970144272, -0.12119755893945694, 0.07120175659656525, 0.05407170578837395, 0.0179436057806015, 0.07252787798643112, -0.05706556513905525, 0.04152284935116768, 0.139897882938385, -0.08164766430854797, 0.1449587196111679, 0.16808827221393585, -0.08507421612739563, 0.16733917593955994, 0.3047759532928467, -0.09884221851825714, -0.27654603123664856, -0.13852934539318085, -0.22699107229709625, -0.1497262865304947, 0.05740240216255188, -0.16743381321430206, 0.1785702109336853, 0.15324127674102783, -0.17003217339515686, 0.15567822754383087, -0.20013009011745453, -0.04761470854282379, 0.22139687836170197, -0.07744777947664261, 
0.3270156681537628, -0.15036125481128693, -0.06321949511766434, -0.1357784867286682, -0.14006933569908142, 0.1640874743461609, -0.2520224452018738, 0.009696963243186474, 0.03358783572912216, -0.07571630924940109, -0.053476277738809586, -0.06294088065624237, 0.20365726947784424, 0.08864487707614899, 0.048724330961704254, -0.07783883810043335, 0.0538349449634552, 0.17002438008785248, -0.08258774876594543, 0.11427507549524307, -0.1551506072282791, -0.007821562699973583, -0.11334814876317978, 0.049372438341379166, -0.007536802440881729, 0.07303659617900848, 0.018435997888445854, -0.055110782384872437, -0.09097205847501755, -0.012564500793814659, -0.0010017354506999254, 0.017773348838090897, 0.2545160949230194, 0.12870335578918457, -0.09068934619426727, 0.12058752775192261, -0.08183950185775757, -0.10650019347667694, -0.09857156872749329, -0.09697787463665009, -0.09271606057882309, 0.05486408993601799, -0.29307621717453003, 0.06150501221418381, 0.04435229301452637, -0.056741055101156235, 0.021166298538446426, 0.04856487363576889, -0.07821919023990631, -0.047246869653463364, 0.10806192457675934, -0.05617867782711983, 0.0060365828685462475, 0.06188586354255676, 0.06320629268884659, 0.01908440701663494, 0.015047809109091759, 0.07980872690677643, 0.02779097482562065, 0.03299710527062416, 0.02156687341630459, 0.1232525184750557, -0.1096741333603859, 0.025780048221349716, 0.08234716206789017, -0.03277469798922539, -0.1291341930627823, 0.27868539094924927, 0.0328671969473362, -0.07490034401416779, -0.014853513799607754, 0.02017960511147976, -0.08733731508255005, -0.11030066758394241, 0.03357265517115593, 0.05562034994363785, -0.0790076032280922, -0.16002187132835388, 0.04434940963983536, -0.04750889167189598, -0.011495170183479786, -0.09123263508081436, 0.12545834481716156, 0.11386078596115112, 0.07958052307367325, -0.08150102943181992, 0.09422098100185394, -0.015888547524809837, -0.11720315366983414, -0.00965417642146349, -0.04330809786915779, -0.27147001028060913, 
0.0114149060100317, 0.07767054438591003, -0.01839832030236721, -0.024710092693567276, -0.05538126826286316, 0.068058542907238, -0.18358135223388672, 0.023774465546011925, -0.05291692540049553, 0.013881206512451172, 0.0013482654467225075, -0.06351982802152634, -0.013056534342467785, 0.018374785780906677, -0.11782747507095337, -0.05090685561299324, -0.03275580331683159, 0.07783249765634537, -0.16030682623386383, -0.0877394899725914, 0.11273340880870819, 0.03247608616948128, 0.1116083636879921, 0.11100348085165024, 0.0029238115530461073, 0.09652310609817505, -0.08867620676755905, -0.10137758404016495, 0.028416428714990616, 0.05850553140044212, -0.004717225208878517, 0.0338771790266037, -0.08107476681470871, 0.0965980663895607, -0.08487499505281448, 0.0017951868940144777, -0.03430997580289841, -0.11891184002161026, -0.1062634065747261, -0.07948566228151321, -0.1201784610748291, 0.039140596985816956, -0.16233326494693756, 0.17395327985286713, 0.09610553085803986, 0.10544848442077637, 0.07115017622709274, -0.017539754509925842, -0.051834944635629654, 0.0012812841450795531, -0.03794896602630615, -0.046503376215696335, -0.12840472161769867, 0.03780418261885643, -0.07327639311552048, -0.09395363926887512, 0.3460042476654053, -0.03070560283958912, -0.12798358500003815, 0.048227906227111816, 0.14436577260494232, 0.05600818246603012, -0.00208035996183753, 0.2746630012989044, 0.046842265874147415, 0.03356502205133438, -0.05823233723640442, 0.006521868985146284, 0.05556127429008484, -0.07734274864196777, 0.005020815413445234, 0.05489637702703476, 0.12683485448360443, 0.04395321011543274, 0.04137979447841644, -0.1202191486954689, -0.02280261740088463, 0.010061034001410007, 0.08806835114955902, 0.06402159482240677, 0.03048405982553959, 0.09209851920604706, 0.11836949735879898, -0.03152010589838028, -0.016634559258818626, -0.03362197056412697, 0.015894491225481033, -0.16492749750614166, -0.13232536613941193, -0.02111920900642872, -0.1728866845369339, -0.0008740238845348358, 
-0.003787984373047948, -0.04413442686200142, 0.26616278290748596, 0.05202426016330719, -0.014622442424297333, -0.0725850760936737, -0.14192940294742584, 0.008088779635727406, -0.0745602697134018, -0.025967802852392197, -0.0486544668674469, 0.029924146831035614, -0.07588427513837814, 0.02722756192088127, -0.05634620040655136, -0.06126495078206062, 0.059751976281404495, 0.09817507117986679, 0.1141197606921196, -0.06400559097528458, -0.037795860320329666, -0.12780417501926422, 0.005348340142518282, -0.031182952225208282, 0.19150452315807343, 0.0732182040810585, 0.07031357288360596, 0.11209698021411896, 0.07329583913087845, -0.047418445348739624, -0.11615771055221558, -0.050271112471818924, -0.035710208117961884, -0.03748736158013344, 0.04092748463153839, -0.03058774583041668, -0.03980829566717148, -0.04853656142950058, 0.19831372797489166, 0.26129576563835144, -0.08521967381238937, -0.0005671381950378418, -0.0002930442860815674, 0.003739734645932913, 0.00204873806796968, 0.1472928375005722, 0.05853525921702385, 0.11207357794046402, -0.051810771226882935, -0.0021520762238651514, -0.04380796104669571, -0.02876044623553753, -0.16162092983722687, 0.07960914820432663, -0.03894373029470444, -0.10289120674133301, -0.030670279636979103, 0.14215749502182007, -0.06743727624416351, 0.07221474498510361, 0.06153428182005882, -0.055546604096889496, -0.0187344029545784, -0.01849968172609806, 0.15872111916542053, 0.0667259618639946, 0.006397204007953405, -0.11801804602146149, 0.009210037998855114, 0.04581350460648537, -0.041233129799366, -0.30970680713653564, -0.18251237273216248, 0.08559861034154892, 0.09826260805130005, 0.2934248149394989, 0.01480457279831171, 0.1105048730969429, 0.012399069964885712, 0.023192690685391426, -0.17247705161571503, 0.06305760145187378, 0.02640485018491745, -0.04569758474826813, -0.06197669357061386, -0.21027415990829468, -0.15482737123966217, -0.012660115025937557, 0.07564311474561691, 0.07467310130596161, -0.013488510623574257, 0.1677602231502533, 
-0.04988284036517143, -0.08345402032136917, -0.02381213940680027, -0.11725359410047531, 0.09443474560976028, -0.06419520080089569, -0.07034330070018768, -0.07380110025405884, -0.04098692163825035, 0.006217553745955229, 0.03104177676141262, -0.26473453640937805, -0.035998400300741196, 0.14492513239383698, -0.0039027442689985037, 0.11870933324098587, 0.06679368764162064, 0.06207719072699547, 0.021644730120897293, -0.05295679718255997, -0.00618883827701211, -0.061871565878391266, 0.044328924268484116, 0.07077302038669586, -0.0393492691218853, 0.0051862443797290325, -0.09827139228582382, 0.04674050584435463, -0.040249425917863846, -0.05528632551431656, -0.1310999095439911 ]
a8bf4e76dd4f1d1767c90e16b5abda8c9e13893a
This dataset is a mixture of the following NLI (Natural Language Inference) and TE (Textual Entailment) datasets: - SNLI - MNLI - Fever - SciTail - PAWS - VitaminC To merge the datasets together, labels has been converted to "entailment" and "non-entailment" using an NLI format with features "premise" and "hypothesis".
AntoineBlanot/nli_mixture
[ "region:us" ]
2024-01-25T02:56:06+00:00
{"dataset_info": {"features": [{"name": "premise", "dtype": "string"}, {"name": "hypothesis", "dtype": "string"}, {"name": "label_name", "dtype": "string"}, {"name": "subset", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 351587589, "num_examples": 1594065}, {"name": "test", "num_bytes": 29289215, "num_examples": 112013}], "download_size": 149650337, "dataset_size": 380876804}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]}
2024-01-25T03:10:46+00:00
[]
[]
TAGS #region-us
This dataset is a mixture of the following NLI (Natural Language Inference) and TE (Textual Entailment) datasets: - SNLI - MNLI - Fever - SciTail - PAWS - VitaminC To merge the datasets together, labels has been converted to "entailment" and "non-entailment" using an NLI format with features "premise" and "hypothesis".
[]
[ "TAGS\n#region-us \n" ]
[ 6 ]
[ "passage: TAGS\n#region-us \n" ]
[ 0.024608636274933815, -0.026205500587821007, -0.009666500613093376, -0.10395516455173492, 0.08638657629489899, 0.059816278517246246, 0.01882290467619896, 0.020661840215325356, 0.23975107073783875, -0.005599027033895254, 0.1219947561621666, 0.0015615287702530622, -0.037353623658418655, 0.03733762726187706, -0.0035912662278860807, -0.17583473026752472, 0.03876631706953049, -0.018274923786520958, 0.01843859627842903, 0.026470553129911423, -0.07776834815740585, -0.07564429938793182, 0.015296397730708122, -0.10247814655303955, -0.083692267537117, 0.11002834886312485, 0.031466204673051834, -0.019670886918902397, 0.10779199749231339, -0.04243955761194229, 0.18699054419994354, -0.011512263678014278, -0.11213519424200058, -0.2536850869655609, 0.021806683391332626, -0.01765260472893715, -0.08747660368680954, 0.01506110467016697, 0.0665089413523674, -0.09014441072940826, -0.0588928684592247, 0.0795099288225174, -0.01132340170443058, 0.04246443510055542, -0.27593839168548584, -0.12684126198291779, -0.05297930911183357, -0.1421966552734375, 0.08651168644428253, 0.04035491496324539, 0.008764253929257393, 0.15506891906261444, -0.20897391438484192, 0.004104613792151213, 0.08255259692668915, -0.2538507878780365, 0.05591634660959244, 0.17671173810958862, 0.03623908758163452, 0.18037272989749908, 0.0060391901060938835, 0.11029672622680664, 0.0716743916273117, -0.024263937026262283, -0.17590197920799255, -0.08127854019403458, -0.04696211963891983, 0.16642488539218903, -0.06727185100317001, -0.14248386025428772, 0.34701237082481384, 0.00015008423360995948, 0.009657775051891804, 0.16921205818653107, -0.059524230659008026, -0.09972117841243744, 0.07259953022003174, 0.016484731808304787, 0.018492350354790688, 0.1471305936574936, 0.16307872533798218, -0.0458691343665123, -0.13837823271751404, -0.018630273640155792, -0.22798998653888702, 0.17510560154914856, -0.03248048573732376, 0.13137903809547424, -0.27447956800460815, 0.01684025302529335, -0.2570667266845703, 0.0032130838371813297, 
0.04178816080093384, -0.06004921346902847, -0.0226522795855999, -0.013265985064208508, -0.08018817007541656, 0.004899587947875261, 0.06192673370242119, 0.1266920566558838, -0.06128726154565811, 0.06128238886594772, -0.09319206327199936, 0.141696035861969, 0.07166698575019836, 0.07868369668722153, 0.13037432730197906, 0.041205424815416336, -0.07187089323997498, -0.21872246265411377, -0.0026476888451725245, -0.06275863200426102, -0.09502086788415909, -0.0020165652967989445, -0.11606067419052124, 0.17244569957256317, -0.030802514404058456, -0.09825427830219269, -0.11208184063434601, 0.09148659557104111, -0.032992321997880936, -0.03437839448451996, -0.03552987426519394, -0.020977836102247238, 0.019381176680326462, 0.04704452306032181, -0.1548958420753479, -0.005131472367793322, 0.07039852440357208, 0.11502562463283539, -0.1346137970685959, -0.003783059772104025, -0.07908964157104492, 0.03039063885807991, 0.07654735445976257, -0.16510222852230072, 0.03158547356724739, -0.1124754324555397, -0.07531405985355377, 0.002912673633545637, -0.015710093080997467, -0.016202643513679504, 0.166526660323143, -0.0020451415330171585, 0.0714716836810112, -0.026345307007431984, -0.05890209600329399, -0.11243434250354767, -0.08489254862070084, 0.05390460044145584, 0.03670717030763626, 0.03266148269176483, -0.2193479984998703, 0.014805203303694725, -0.12762966752052307, 0.1360815018415451, -0.10566820204257965, -0.04705966264009476, -0.022842247039079666, 0.20562705397605896, 0.037286072969436646, 0.08762791007757187, -0.22171171009540558, 0.039756543934345245, -0.05404696613550186, 0.18480908870697021, -0.1502426266670227, -0.0799463614821434, 0.20813211798667908, -0.07964949309825897, -0.10115210711956024, 0.021235812455415726, 0.020391687750816345, 0.026287272572517395, 0.0766737088561058, 0.4564172327518463, -0.09766800701618195, -0.09146861732006073, 0.10178250074386597, 0.17055274546146393, -0.12427149713039398, -0.1827561855316162, 0.06446871906518936, -0.16666454076766968, 
-0.1973118633031845, 0.0018917324487119913, 0.09222044050693512, 0.038269978016614914, -0.07875611633062363, -0.020746968686580658, 0.06325206160545349, -0.0007678253459744155, 0.09095914661884308, 0.03755716234445572, 0.09034032374620438, -0.08716782182455063, 0.11115926504135132, -0.05017651244997978, 0.004037132486701012, 0.1343354731798172, 0.027325427159667015, -0.03223329409956932, 0.08694463223218918, -0.0485352948307991, 0.05295134335756302, -0.1662379503250122, -0.15068690478801727, 0.03398871049284935, 0.06283251196146011, 0.03186952322721481, 0.1280253529548645, 0.08141885697841644, -0.10732853412628174, 0.022690722718834877, -0.004228927195072174, 0.058398615568876266, 0.03891623765230179, 0.006107209715992212, 0.008764320984482765, 0.0961301177740097, -0.10607069730758667, -0.13589619100093842, -0.07336436957120895, -0.014715781435370445, 0.14371353387832642, -0.0302802175283432, 0.07690227776765823, -0.004240254405885935, 0.00013200697139836848, 0.06930823624134064, 0.08137880265712738, 0.016412746161222458, 0.08971183747053146, -0.05237193778157234, -0.05160155147314072, 0.10863113403320312, -0.13533565402030945, 0.17837053537368774, 0.14053137600421906, -0.20532016456127167, 0.029453208670020103, -0.06838275492191315, 0.03670361638069153, -0.008162540383636951, 0.0975119024515152, -0.08272241055965424, -0.02106042578816414, 0.013134466484189034, 0.0052274600602686405, -0.013007243163883686, 0.017682146281003952, -0.07295988500118256, -0.07787393033504486, -0.10233919322490692, 0.08436838537454605, 0.11562882363796234, -0.10282530635595322, 0.14214380085468292, 0.4384984076023102, 0.11495281755924225, 0.21582984924316406, -0.09581480920314789, -0.0412987545132637, 0.007486371789127588, 0.0001535322517156601, -0.04476691037416458, 0.08031861484050751, -0.15973517298698425, -0.038901735097169876, 0.027348900213837624, 0.07128690183162689, 0.11475157737731934, -0.14959022402763367, -0.09639324247837067, -0.00793045200407505, 0.0022841424215584993, 
-0.1249532699584961, 0.023905446752905846, -0.03974650055170059, 0.04015624523162842, 0.07232289016246796, -0.021535737439990044, 0.13939237594604492, -0.04166141897439957, -0.0639561116695404, 0.07585346698760986, -0.2017085999250412, -0.23179671168327332, -0.12309670448303223, -0.14680525660514832, 0.04366797208786011, 0.05154111236333847, 0.01726446859538555, -0.17635835707187653, -0.015074856579303741, 0.07706750929355621, 0.07820965349674225, -0.20886357128620148, -0.022814949974417686, -0.004290030337870121, 0.0895976573228836, -0.10227091610431671, -0.0017130117630586028, -0.04419664293527603, -0.10150232166051865, 0.0017003051470965147, 0.07279510796070099, -0.137485533952713, 0.13807645440101624, 0.21589438617229462, 0.07225540280342102, 0.07359948754310608, -0.019093448296189308, 0.09936179965734482, -0.10856141895055771, -0.16549113392829895, 0.08348225057125092, -0.06234746053814888, 0.047262318432331085, 0.17534415423870087, 0.03307317942380905, -0.13904969394207, -0.015682822093367577, -0.0402069091796875, -0.15603256225585938, -0.238995760679245, -0.09178274869918823, -0.1182505264878273, 0.16442428529262543, 0.0009358620154671371, 0.06651917099952698, 0.08258313685655594, -0.022042419761419296, 0.16447891294956207, -0.07379321753978729, -0.07578866183757782, -0.006978808436542749, 0.12375060468912125, -0.056660156697034836, -0.03080669604241848, -0.10566964000463486, -0.008295975625514984, 0.1151021271944046, 0.15304014086723328, 0.12214863300323486, 0.2957419455051422, 0.08268889784812927, 0.026645636186003685, 0.08958091586828232, 0.17622539401054382, 0.09495089203119278, 0.07838419824838638, -0.045413073152303696, -0.014814783819019794, 0.014317171648144722, -0.04022889584302902, 0.010141594335436821, 0.14683100581169128, -0.2679629921913147, -0.006678564939647913, -0.2710230350494385, 0.0965198427438736, -0.10913380235433578, 0.11837165057659149, -0.01015760749578476, 0.10194015502929688, 0.11082887649536133, 0.03233652561903, 
-0.03858073800802231, 0.16613617539405823, 0.08450309932231903, -0.11277695000171661, 0.001758623169735074, 0.03737903758883476, 0.09715615212917328, -0.02818971499800682, 0.12721189856529236, -0.11048974841833115, -0.1464834064245224, 0.013753619976341724, 0.07152791321277618, -0.15373679995536804, 0.3138748109340668, 0.012069208547472954, -0.13481520116329193, -0.01481647603213787, -0.09957809001207352, -0.006440147757530212, 0.1254177987575531, 0.09333524852991104, 0.07935678958892822, -0.2185502052307129, -0.13339371979236603, 0.05872276425361633, -0.00575496768578887, 0.22408108413219452, -0.034034017473459244, -0.11356475204229355, -0.027013886719942093, 0.04241163283586502, -0.06043251231312752, 0.08524788916110992, 0.023536119610071182, -0.08113526552915573, -0.032957352697849274, 0.05323701351881027, 0.012368366122245789, 0.00524376705288887, 0.09360801428556442, 0.020107939839363098, -0.0009265501867048442, 0.01785753294825554, 0.047885000705718994, -0.0675911232829094, -0.1984109878540039, 0.09357594698667526, -0.05215044692158699, 0.0015536568826064467, -0.08013670891523361, -0.15122665464878082, -0.08837161958217621, -0.16009655594825745, 0.12540200352668762, -0.034406669437885284, 0.12700119614601135, -0.06619787961244583, 0.17341409623622894, -0.07871770113706589, 0.04481020197272301, -0.047349292784929276, 0.050332702696323395, -0.007268077693879604, -0.07756082713603973, 0.16585899889469147, -0.15564003586769104, 0.01809087023139, 0.19572502374649048, -0.018915493041276932, 0.07177707552909851, 0.021322092041373253, -0.0636206790804863, 0.23147478699684143, 0.3014698624610901, 0.008138049393892288, 0.1665448248386383, 0.3018903136253357, -0.07466315478086472, -0.2642788887023926, -0.05505012720823288, -0.2841376066207886, -0.05371501296758652, 0.10716094076633453, -0.22523896396160126, 0.06986407935619354, 0.14383509755134583, -0.06471995264291763, 0.30228954553604126, -0.21825523674488068, 0.012589273042976856, 0.15434536337852478, 
-0.08868814259767532, 0.5515313148498535, -0.1133413165807724, -0.17677772045135498, -0.008122089318931103, -0.08741296827793121, 0.10602109134197235, -0.0340677872300148, 0.06877441704273224, 0.013465235009789467, 0.04797380417585373, 0.048932258039712906, -0.03111894056200981, 0.22701001167297363, 0.008710170164704323, 0.09015397727489471, -0.07378865778446198, -0.18624304234981537, 0.11639340221881866, -0.04359482601284981, -0.08891059458255768, 0.0849778801202774, -0.05942516401410103, -0.11078983545303345, 0.04663389176130295, -0.07950539886951447, -0.024862350896000862, 0.08423490077257156, -0.04678233340382576, -0.042606171220541, -0.008054176345467567, -0.1618063747882843, -0.0002289071271661669, 0.31360217928886414, -0.07096036523580551, 0.16695955395698547, 0.03677211329340935, 0.00038613268407061696, -0.11027684062719345, 0.030288029462099075, -0.05203165486454964, -0.021576624363660812, 0.09578979015350342, -0.11096979677677155, 0.03204701095819473, 0.14160704612731934, -0.04864364117383957, 0.05846960097551346, 0.09256096184253693, -0.0849417969584465, 0.007583672646433115, 0.17753590643405914, -0.17537221312522888, -0.1273445188999176, -0.006135711446404457, -0.09862716495990753, 0.14055661857128143, 0.04394126310944557, 0.05191568285226822, 0.16669964790344238, 0.03967129811644554, -0.029474308714270592, -0.02817419543862343, -0.1153380498290062, -0.0201893113553524, 0.040153320878744125, 0.00045633706031367183, -0.08791285753250122, 0.2262638509273529, 0.06409153342247009, -0.1328488290309906, -0.051157206296920776, 0.2161225974559784, -0.06805316358804703, -0.04911920800805092, -0.223562553524971, 0.10752306133508682, -0.07112517952919006, -0.0965060144662857, 0.05453834682703018, -0.02270081453025341, 0.005106312222778797, 0.181985542178154, 0.03941008821129799, 0.11070270836353302, 0.03738937899470329, -0.02448922023177147, 0.15798696875572205, -0.142850860953331, -0.14191335439682007, -0.025354057550430298, -0.08757315576076508, 
-0.13844476640224457, -0.026804137974977493, 0.1617041826248169, -0.09177309274673462, -0.14772607386112213, -0.2621181011199951, 0.10968475043773651, -0.16432365775108337, -0.10192688554525375, -0.03469514101743698, -0.08968492597341537, 0.0696166530251503, 0.030301768332719803, -0.03093348816037178, -0.06706760823726654, -0.18593791127204895, 0.0816768929362297, 0.06349513679742813, 0.045533183962106705, -0.017847947776317596, 0.0067379772663116455, 0.1720137596130371, 0.025955144315958023, 0.10040043294429779, 0.16762186586856842, 0.011397695168852806, 0.2246655523777008, -0.1671202927827835, -0.11496317386627197, 0.1336962729692459, -0.026543032377958298, 0.06762003898620605, 0.16792191565036774, -0.0772583931684494, 0.015526676550507545, -0.028136352077126503, 0.07066910713911057, -0.11003983020782471, -0.105624258518219, 0.007937257178127766, 0.02567129209637642, -0.2755882740020752, -0.005599735304713249, -0.19717298448085785, 0.14788752794265747, 0.02579621411859989, 0.03297143429517746, 0.10257530212402344, 0.10404334217309952, 0.08312062919139862, -0.0017710148822516203, 0.03226327523589134, -0.1176818460226059, 0.02753005363047123, -0.059239376336336136, -0.020663779228925705, 0.017624232918024063, 0.36952024698257446, -0.03603357449173927, -0.046802736818790436, 0.003710439894348383, 0.1307835876941681, -0.02139742486178875, 0.017395347356796265, 0.13209912180900574, 0.12607666850090027, -0.08595693111419678, -0.1504845917224884, 0.04888554662466049, -0.04565655067563057, -0.02836887165904045, 0.1464131623506546, 0.05905961990356445, 0.1050296202301979, 0.0908031314611435, -0.014463032595813274, -0.00318976235575974, 0.012856799177825451, -0.15486004948616028, 0.06223496049642563, -0.010558074340224266, 0.012565906159579754, 0.017934376373887062, 0.15238402783870697, -0.005540105979889631, 0.07739730179309845, -0.09889880567789078, 0.004208535887300968, -0.13498884439468384, -0.07913459837436676, 0.03617347031831741, -0.13393273949623108, 
0.04141177982091904, -0.01871878281235695, 0.029611799865961075, 0.30386561155319214, 0.02558239921927452, -0.020639164373278618, 0.12512871623039246, -0.1214587539434433, -0.12050267308950424, -0.001594188273884356, -0.029960084706544876, 0.0791488066315651, -0.02633434161543846, -0.0997740775346756, -0.1001306027173996, -0.15166029334068298, -0.09759195148944855, 0.05182836204767227, -0.04993441700935364, -0.059362251311540604, -0.17634081840515137, -0.05707859992980957, -0.05147340148687363, 0.14025864005088806, -0.12263951450586319, 0.15159130096435547, -0.014490418136119843, 0.004084470681846142, 0.04405883327126503, 0.1950942426919937, -0.03644494712352753, 0.08714226633310318, 0.0154351145029068, 0.1522706001996994, -0.05119588226079941, 0.14720745384693146, -0.10931728035211563, -0.04014137014746666, -0.06710435450077057, 0.21513493359088898, 0.25630924105644226, -0.06136954948306084, -0.008937356993556023, -0.012760217301547527, 0.058654606342315674, 0.1073930487036705, 0.16049085557460785, 0.002326392102986574, 0.2802925705909729, -0.03133585304021835, 0.04815128445625305, 0.02901598811149597, 0.013607407920062542, -0.06336209923028946, 0.03397751972079277, 0.07539387792348862, -0.035039983689785004, -0.1412304788827896, 0.15837742388248444, -0.21980468928813934, 0.18157227337360382, 0.11640069633722305, -0.19996967911720276, -0.013728445395827293, -0.04882071167230606, 0.1689416468143463, -0.0856364443898201, 0.1637246012687683, -0.0903693437576294, -0.2108195722103119, -0.2056000679731369, 0.03867346793413162, -0.34623071551322937, -0.254462867975235, 0.10422009229660034, 0.1488201916217804, 0.04015883058309555, -0.018507536500692368, -0.019967829808592796, -0.018367022275924683, 0.04877542704343796, -0.0067357709631323814, 0.06014643982052803, 0.031397558748722076, -0.02988368645310402, -0.24127542972564697, -0.029804671183228493, 0.023964406922459602, -0.07093082368373871, 0.07464958727359772, -0.06874357163906097, -0.022495782002806664, 
0.08059766888618469, -0.03066304884850979, 0.03298592567443848, -0.035373736172914505, -0.16326889395713806, 0.027529051527380943, 0.03900543600320816, 0.036012712866067886, 0.00634160777553916, 0.0008072225609794259, -0.03455270454287529, 0.0644603744149208, -0.16716794669628143, -0.16015739738941193, 0.14140215516090393, -0.06745140254497528, 0.2779497504234314, -0.05812826007604599, -0.0809100940823555, 0.04766704887151718, -0.03426874056458473, 0.1807648241519928, -0.07756473124027252, 0.047254521399736404, 0.12766779959201813, 0.011127962730824947, 0.03121316432952881, -0.3092964291572571, 0.11082969605922699, -0.000795336440205574, -0.006093299947679043, -0.07581598311662674 ]
b2f0a9724f8a1322f3fe5135c888e705742df9a4
I hand crafted these. For MemGPT function calling.
starsnatched/MemGPT-Functions-DPO
[ "task_categories:conversational", "task_categories:text-generation", "size_categories:1K<n<10K", "language:en", "license:apache-2.0", "memgpt", "function calling", "function", "region:us" ]
2024-01-25T03:01:26+00:00
{"language": ["en"], "license": "apache-2.0", "size_categories": ["1K<n<10K"], "task_categories": ["conversational", "text-generation"], "tags": ["memgpt", "function calling", "function"]}
2024-02-04T21:34:26+00:00
[]
[ "en" ]
TAGS #task_categories-conversational #task_categories-text-generation #size_categories-1K<n<10K #language-English #license-apache-2.0 #memgpt #function calling #function #region-us
I hand crafted these. For MemGPT function calling.
[]
[ "TAGS\n#task_categories-conversational #task_categories-text-generation #size_categories-1K<n<10K #language-English #license-apache-2.0 #memgpt #function calling #function #region-us \n" ]
[ 60 ]
[ "passage: TAGS\n#task_categories-conversational #task_categories-text-generation #size_categories-1K<n<10K #language-English #license-apache-2.0 #memgpt #function calling #function #region-us \n" ]
[ -0.04347169026732445, -0.01612485572695732, -0.003886181628331542, 0.006724764592945576, 0.09412693232297897, 0.07553461194038391, 0.14084090292453766, 0.07779967039823532, 0.07928010821342468, 0.016360893845558167, 0.13538938760757446, 0.042357273399829865, 0.010097142308950424, 0.10851866006851196, 0.004455174319446087, -0.21931320428848267, 0.0727664902806282, -0.12657368183135986, -0.025375371798872948, 0.013123313896358013, 0.09204793721437454, 0.01655847579240799, 0.04861029237508774, -0.053123217076063156, 0.00044326484203338623, 0.01367922592908144, 0.02165839448571205, -0.0893179327249527, 0.07105544209480286, 0.0015542223118245602, -0.014240509830415249, -0.037046339362859726, -0.07285500317811966, -0.27518531680107117, 0.007621143013238907, -0.04224546253681183, 0.0016559886280447245, -0.027330957353115082, 0.0701988935470581, -0.004760586656630039, 0.0570671409368515, -0.0366615355014801, -0.019051168113946915, 0.09474015235900879, -0.09863703697919846, -0.08102872967720032, -0.10407945513725281, 0.016988759860396385, -0.0018195677548646927, 0.08620288223028183, -0.0022142399102449417, 0.10955726355314255, -0.1531994342803955, -0.0008038594387471676, 0.13903066515922546, -0.35494306683540344, 0.017998449504375458, 0.22706255316734314, 0.029573459178209305, 0.08644627034664154, -0.052810151129961014, 0.0001661498099565506, 0.052474625408649445, 0.02739192172884941, -0.06332218647003174, -0.09730726480484009, -0.07029034197330475, 0.0560649111866951, -0.03948593884706497, -0.0911448746919632, 0.3701704740524292, 0.07148587703704834, 0.05683659762144089, -0.09866823256015778, -0.054745279252529144, -0.017826126888394356, -0.06692878156900406, 0.13131695985794067, 0.04426649212837219, 0.10094143450260162, 0.1762712597846985, -0.0666465014219284, -0.10445594787597656, -0.08151224255561829, -0.13118374347686768, -0.0005491641350090504, -0.03180745616555214, 0.026958733797073364, -0.11042614281177521, 0.02916761115193367, -0.1778048872947693, 
-0.04887547343969345, -0.04226868227124214, -0.028790293261408806, -0.06201831251382828, 0.05244684964418411, 0.055650584399700165, -0.02511177770793438, 0.18671472370624542, 0.062249474227428436, -0.0568702332675457, 0.07654637098312378, -0.06640038639307022, 0.1160939484834671, 0.05056651309132576, 0.15308472514152527, -0.0342983640730381, -0.06659570336341858, 0.01590196043252945, -0.05796357989311218, 0.04371456801891327, -0.02058194950222969, -0.07400910556316376, 0.018697090446949005, -0.06512083113193512, 0.17922915518283844, 0.03194550424814224, 0.06267154961824417, -0.09459206461906433, -0.004612516146153212, 0.14249089360237122, -0.09720470756292343, -0.01726067252457142, 0.06553250551223755, -0.010678749531507492, 0.021449550986289978, -0.024803146719932556, 0.008619965985417366, -0.08018693327903748, -0.027748681604862213, -0.08001458644866943, -0.03728260099887848, -0.03229553997516632, -0.014378239400684834, 0.11792747676372528, -0.001933070831000805, -0.054669324308633804, -0.07068482786417007, -0.20544543862342834, -0.03383622318506241, 0.060424111783504486, -0.05523934215307236, -0.06802651286125183, -0.03705073148012161, -0.06669497489929199, 0.012273851782083511, -0.021443741396069527, 0.12978237867355347, -0.10086925327777863, 0.025168022140860558, -0.11243727058172226, 0.061578717082738876, -0.15344887971878052, 0.06350651383399963, -0.12230454385280609, 0.11016598343849182, -0.16827735304832458, 0.023689502850174904, -0.15209409594535828, 0.1528066098690033, -0.06534120440483093, 0.06312777101993561, -0.06778408586978912, -0.04131215438246727, -0.11531922221183777, 0.12420888990163803, -0.04687606170773506, -0.010234799236059189, 0.0883415937423706, -0.0077740889973938465, -0.12270699441432953, 0.0072206794284284115, 0.034654632210731506, 0.1627432405948639, 0.04111942648887634, 0.32859712839126587, -0.010410243645310402, 0.01931736245751381, 0.06882298737764359, 0.12183573842048645, -0.10509702563285828, -0.13688459992408752, 
0.11425118893384933, -0.23058627545833588, -0.03941132128238678, 0.03609698265790939, 0.01678026281297207, 0.03274907171726227, 0.06372396647930145, -0.07878809422254562, 0.0011993683874607086, -0.06147214025259018, 0.07728047668933868, -0.06337200105190277, 0.07123377919197083, -0.101176418364048, 0.03530916944146156, -0.10520482063293457, 0.03145609423518181, 0.034204158931970596, 0.016178173944354057, -0.02517656981945038, -0.029804281890392303, 0.01687280833721161, 0.09180179238319397, -0.14549225568771362, -0.04362133517861366, 0.07225128263235092, 0.004765015095472336, 0.07635615766048431, 0.07644882053136826, 0.06604769825935364, -0.08045683056116104, 0.007674865424633026, 0.03518746793270111, -0.05183550342917442, 0.02758839540183544, -0.016964595764875412, -0.09392048418521881, 0.06928716599941254, -0.042869098484516144, 0.14872834086418152, -0.045753464102745056, 0.0009119915775954723, 0.06276718527078629, -0.021505082026124, 0.0221627876162529, 0.03622832149267197, 0.06357443332672119, -0.03141976147890091, 0.05711871758103371, -0.06070705130696297, 0.079301618039608, 0.013023175299167633, 0.026122165843844414, 0.15101683139801025, -0.18537157773971558, -0.005965096410363913, 0.1439414620399475, 0.020500654354691505, 0.08616884052753448, 0.047817766666412354, -0.015323517844080925, -0.02971760928630829, 0.01974768005311489, -0.07118392735719681, 0.024796919897198677, 0.04512907192111015, 0.09553921222686768, -0.06143798679113388, -0.027392178773880005, -0.01935134269297123, -0.057154182344675064, -0.03900275006890297, 0.04082658886909485, 0.16793948411941528, -0.16493171453475952, 0.12453833222389221, 0.25775444507598877, -0.0004681060090661049, 0.24271747469902039, -0.0555279403924942, -0.020299378782510757, -0.015360113233327866, 0.10541440546512604, -0.024731725454330444, 0.11740460991859436, -0.2925839126110077, 0.04754180461168289, 0.11114414036273956, 0.10232968628406525, 0.12844669818878174, -0.12235157936811447, -0.022530287504196167, 
-0.07140934467315674, -0.10475082695484161, -0.02865670621395111, 0.05419593304395676, 0.043477416038513184, 0.051403261721134186, 0.05550200492143631, -0.011163797229528427, 0.09523148834705353, -0.010168526321649551, -0.1170094683766365, 0.11799553781747818, -0.2070900797843933, -0.16505740582942963, -0.1626623421907425, 0.03355732932686806, -0.06208846718072891, 0.004839069675654173, 0.05987269803881645, -0.07219420373439789, -0.03671761602163315, -0.05203421786427498, 0.08143353462219238, -0.0793408751487732, -0.08630188554525375, 0.05517648905515671, 0.05901510640978813, -0.048621296882629395, -0.10588452219963074, -0.060441162437200546, 0.012919746339321136, 0.024561122059822083, 0.03890303894877434, -0.1427820473909378, 0.077715203166008, 0.27061450481414795, 0.04025315120816231, 0.03405662626028061, -0.05138809606432915, 0.1386159509420395, -0.04001636058092117, 0.03331097215414047, 0.15706244111061096, 0.05796682834625244, 0.04305730015039444, 0.1940494179725647, -0.0050379429012537, -0.08886803686618805, 0.02415691688656807, 0.028947077691555023, -0.11082641780376434, -0.3125162124633789, -0.1073479875922203, -0.15438182651996613, 0.25220394134521484, 0.015818806365132332, 0.10496662557125092, 0.11639246344566345, 0.014823930338025093, 0.007628198713064194, 0.05630248785018921, 0.02942061796784401, 0.017113856971263885, 0.21524794399738312, -0.05201711505651474, -0.04030214995145798, -0.1368146538734436, 0.007907974533736706, 0.11992430686950684, 0.16023540496826172, 0.19517609477043152, 0.19972816109657288, 0.26293641328811646, 0.09072372317314148, 0.042147740721702576, 0.035604558885097504, 0.08484424650669098, 0.0507650300860405, 0.008616947568953037, -0.05697077140212059, -0.07685916870832443, -0.012486211955547333, 0.04920855909585953, 0.1337892860174179, -0.1585914045572281, -0.0194544680416584, -0.08071165531873703, 0.0875900536775589, 0.18327921628952026, 0.044700298458337784, -0.04873250797390938, 0.0415133535861969, 0.12117468565702438, 
-0.020395563915371895, -0.0493287667632103, 0.06930824369192123, 0.09857958555221558, -0.1776091605424881, 0.039829321205616, -0.020875681191682816, 0.1095222607254982, 0.0004127630963921547, 0.026182182133197784, -0.08337177336215973, -0.11135028302669525, 0.02071259170770645, 0.16286376118659973, -0.07891713827848434, 0.2249757945537567, 0.030918095260858536, 0.020549165084958076, -0.14417964220046997, 0.017181769013404846, 0.01937406323850155, 0.0378304198384285, 0.05165836960077286, 0.028468981385231018, 0.03344397619366646, 0.04870257526636124, -0.041817259043455124, 0.009765049442648888, 0.04414166137576103, -0.03185585141181946, -0.03761300444602966, -0.0055566225200891495, 0.011285573244094849, -0.02358836494386196, 0.0041804686188697815, -0.11041532456874847, -0.1365634649991989, 0.034754954278469086, 0.1979750394821167, 0.0034822598099708557, -0.0804922878742218, 0.10200807452201843, -0.0021988190710544586, 0.19879260659217834, -0.10396337509155273, -0.05670533701777458, -0.08386003971099854, -0.21828585863113403, 0.07614323496818542, -0.028115179389715195, 0.006453667767345905, -0.1301053762435913, -0.049515530467033386, -0.11310926079750061, -0.0944364070892334, 0.14212264120578766, -0.07904466241598129, -0.035420700907707214, -0.02988123521208763, 0.1892072856426239, -0.15253472328186035, 0.06805039197206497, -0.03250187262892723, -0.0015217401087284088, -0.00876077264547348, -0.15813446044921875, 0.015023251064121723, -0.0020885756239295006, -0.0651223286986351, 0.10041189193725586, -0.04698833450675011, -0.057553306221961975, 0.026684626936912537, -0.0481896810233593, 0.2946105897426605, 0.23777271807193756, -0.03721969947218895, 0.15902340412139893, 0.21206825971603394, -0.048690102994441986, -0.26938849687576294, -0.0769534632563591, -0.13178887963294983, -0.04440896213054657, 0.00566891860216856, -0.1394064575433731, 0.027891255915164948, 0.09077875316143036, -0.07528458535671234, 0.1411668360233307, -0.35976845026016235, -0.10365329682826996, 
0.06726956367492676, -0.056186191737651825, 0.15787063539028168, -0.104718878865242, -0.12017524242401123, -0.10402058064937592, -0.13498036563396454, 0.06096586585044861, -0.24423575401306152, 0.12174955010414124, 0.035161785781383514, 0.08656751364469528, -0.04503018409013748, -0.06321604549884796, 0.1986987292766571, 0.06595378369092941, 0.009872592985630035, -0.037723518908023834, -0.1318110227584839, 0.25369200110435486, -0.01653801091015339, 0.12984371185302734, -0.11717446893453598, -0.025220785290002823, -0.13064613938331604, -0.01944907195866108, -0.09422490000724792, 0.04853802174329758, -0.005522195249795914, -0.07876792550086975, -0.07879246026277542, -0.0164040494710207, -0.020297996699810028, 0.0368269607424736, 0.14481383562088013, 0.0030787819996476173, -0.07097572088241577, 0.24671012163162231, 0.028257276862859726, -0.14617013931274414, -0.12786813080310822, -0.07481443881988525, -0.048755500465631485, 0.0868057981133461, -0.20178522169589996, 0.0162668414413929, 0.04029500484466553, -0.03451112285256386, 0.10889843851327896, 0.02392188087105751, -0.11764106154441833, -0.013812916353344917, 0.09897105395793915, -0.012185340747237206, -0.18411290645599365, 0.04846564680337906, 0.04140644893050194, -0.021236654371023178, -0.026020700111985207, 0.09837273508310318, 0.061178531497716904, 0.0674348697066307, 0.019233427941799164, 0.009342013858258724, -0.09909455478191376, 0.12133946269750595, 0.04706721007823944, 0.04889705777168274, -0.13639387488365173, 0.19093731045722961, 0.09938216209411621, -0.08936405181884766, -0.05146429315209389, 0.1904241144657135, -0.10850735008716583, -0.09476461261510849, -0.03794243931770325, 0.09125004708766937, 0.060154858976602554, -0.0851009339094162, -0.04624004662036896, -0.08899614959955215, 0.06648354232311249, -0.024690983816981316, 0.03968941792845726, 0.06512042135000229, -0.04476941376924515, -0.06981348991394043, 0.09801043570041656, 0.01497725024819374, -0.08792659640312195, 0.01033763587474823, 
-0.1201605349779129, -0.09022670239210129, 0.012470564804971218, 0.10432809591293335, -0.007688877638429403, -0.09038327634334564, -0.11634384840726852, 0.027833525091409683, -0.08867673575878143, 0.011833719909191132, -0.10863390564918518, -0.023724257946014404, 0.022322461009025574, 0.06105256453156471, -0.08729918301105499, -0.009555662982165813, -0.14755304157733917, -0.07933813333511353, -0.10069546848535538, 0.1503126621246338, -0.04432306066155434, -0.014523017220199108, 0.101111501455307, -0.02211567386984825, 0.10966365039348602, 0.03819229453802109, -0.04766780883073807, 0.05995303764939308, -0.13043199479579926, -0.12217868864536285, 0.10519649088382721, 0.0462217852473259, 0.047930456697940826, 0.044957779347896576, -0.07316259294748306, 0.0324275903403759, 0.09972439706325531, 0.08303354680538177, -0.060967132449150085, -0.1842065006494522, 0.019994443282485008, -0.12550479173660278, -0.1483423411846161, 0.01373259723186493, -0.1231079176068306, 0.0444323793053627, -0.03514079749584198, 0.1322411447763443, 0.0025917994789779186, 0.027787823230028152, -0.04545953869819641, 0.014314782805740833, -0.04664863646030426, -0.022623486816883087, -0.043092407286167145, -0.01567850261926651, -0.021993856877088547, 0.0013493048027157784, 0.21966277062892914, 0.0175238735973835, -0.08289627730846405, 0.07617658376693726, 0.08638322353363037, 0.03850459307432175, 0.03062247857451439, 0.19550621509552002, 0.1076962947845459, -0.005211939569562674, -0.047223083674907684, 0.0030246498063206673, 0.05335232615470886, 0.001516503980383277, 0.22812873125076294, 0.1238565668463707, 0.08967895060777664, 0.11358129978179932, 0.06007756292819977, -0.07141545414924622, -0.04236556962132454, 0.09575953334569931, 0.07604218274354935, 0.07784189283847809, -0.07397544384002686, 0.04539334028959274, 0.1966334730386734, -0.1046561449766159, 0.0541066974401474, -0.012711845338344574, -0.04323253035545349, -0.05164593830704689, -0.06010190770030022, -0.05242142081260681, 
-0.17175135016441345, -0.012425648979842663, -0.02855011448264122, 0.05727367848157883, 0.21241068840026855, 0.002240548375993967, -0.013876646757125854, 0.15155720710754395, -0.26570776104927063, -0.10565020143985748, 0.004166845232248306, -0.06514093279838562, 0.04445945844054222, -0.06380341947078705, -0.019195009022951126, -0.007496272213757038, -0.07118771970272064, 0.023056399077177048, 0.10196828842163086, -0.04878463223576546, -0.026131058111786842, -0.13069765269756317, -0.0419839508831501, -0.031059667468070984, 0.0736522376537323, -0.03848547115921974, 0.20499832928180695, 0.07524406909942627, -0.06255652010440826, 0.08792217075824738, 0.10364338010549545, -0.09364756941795349, -0.26281964778900146, -0.1063499003648758, 0.06658904999494553, 0.007251935079693794, 0.10843491554260254, -0.04545518010854721, -0.07813361287117004, -0.10009503364562988, 0.22856280207633972, 0.29784461855888367, -0.08484724909067154, -0.02296232059597969, -0.07213789969682693, 0.02306518331170082, -0.04127703234553337, 0.1381479799747467, 0.01114083081483841, 0.2362038791179657, -0.012814191170036793, 0.020656175911426544, -0.027846861630678177, -0.07876425236463547, -0.2525215744972229, 0.051986053586006165, 0.03801906481385231, -0.0719863772392273, -0.01630069687962532, 0.15301278233528137, -0.08558497577905655, 0.0635441392660141, -0.007454348728060722, -0.07520382851362228, -0.08910642564296722, -0.004688223358243704, 0.08298932015895844, 0.001881137490272522, 0.10365165770053864, -0.02877677232027054, -0.09943787008523941, -0.059770699590444565, 0.0249492097645998, -0.282791405916214, -0.09193863719701767, 0.07374458014965057, -0.037587352097034454, 0.21845369040966034, -0.019749080762267113, 0.09002918004989624, 0.09336385875940323, -0.035613179206848145, -0.09286165982484818, 0.16531765460968018, 0.11179466545581818, -0.03133853152394295, 0.021070199087262154, -0.055960241705179214, -0.038644373416900635, -0.009668037295341492, 0.04388027265667915, -0.08299334347248077, 
0.013281766325235367, 0.21196651458740234, 0.013683974742889404, -0.12204775959253311, -0.06899725645780563, -0.1262902021408081, 0.035584546625614166, 0.023936519399285316, 0.0017598439007997513, -0.03188816457986832, 0.023647068068385124, 0.025178562849760056, 0.029596857726573944, -0.13642284274101257, -0.1418249011039734, -0.04173234850168228, -0.05663702264428139, -0.05433572828769684, 0.05439123511314392, -0.17659910023212433, -0.024958709254860878, -0.06108441948890686, 0.1494116187095642, -0.04538971930742264, 0.06014418601989746, 0.09051339328289032, -0.07411905378103256, 0.00009700050577521324, -0.2968296408653259, -0.02624010667204857, 0.07516146451234818, -0.03667723387479782, -0.10185551643371582 ]
536e1ced806b067d160f4e76fb5b36bd61888bce
# Dataset Card for Dataset Name <!-- Provide a quick summary of the dataset. --> This dataset card aims to be a base template for new datasets. It has been generated using [this raw template](https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/datasetcard_template.md?plain=1). ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. 
--> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. 
--> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
neenax/explanation_gpt
[ "region:us" ]
2024-01-25T03:27:33+00:00
{}
2024-01-25T03:28:01+00:00
[]
[]
TAGS #region-us
# Dataset Card for Dataset Name This dataset card aims to be a base template for new datasets. It has been generated using this raw template. ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 34, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.045137420296669006, 0.2356591522693634, -0.003386062802746892, 0.037927258759737015, 0.09696394950151443, 0.01332013588398695, 0.05346196889877319, 0.12374898791313171, 0.006795566063374281, 0.14796778559684753, 0.0668143481016159, 0.12131187319755554, 0.13325916230678558, 0.18678297102451324, -0.05108675733208656, -0.16099153459072113, 0.0581800639629364, -0.04485415667295456, 0.10912185907363892, 0.13665661215782166, 0.12349934875965118, -0.12220652401447296, 0.07425005733966827, -0.06832703202962875, -0.06193598359823227, -0.028017830103635788, -0.056349460035562515, -0.03317961096763611, 0.05196759104728699, 0.02836473099887371, 0.05896558612585068, 0.04607880115509033, 0.04236457869410515, -0.3016852140426636, 0.027975963428616524, 0.055417586117982864, 0.003548306878656149, 0.07276767492294312, 0.057298507541418076, -0.11911769956350327, 0.086423859000206, -0.13508564233779907, 0.11021291464567184, 0.07013335078954697, -0.12130887061357498, -0.198300302028656, -0.13521860539913177, 0.06626161187887192, 0.08458279073238373, 0.004980290774255991, -0.05270589515566826, 0.13964349031448364, -0.02092166431248188, 0.013813171535730362, 0.13743796944618225, -0.034678082913160324, -0.02705211378633976, 0.03608553856611252, 0.04552284628152847, 0.08105316758155823, -0.1158563643693924, 0.03550960123538971, 0.07944474369287491, 0.03154982998967171, 0.08958618342876434, 0.0027435196097940207, 0.05697716027498245, 0.03106646053493023, -0.14918099343776703, -0.11032826453447342, 0.2108081877231598, 0.010249480605125427, -0.04834945499897003, -0.21350215375423431, 0.0031974976882338524, 0.028305409476161003, -0.004651887807995081, -0.050135646015405655, 0.006265797186642885, -0.06577928364276886, 0.08634109050035477, -0.0325600765645504, -0.050808656960725784, -0.0496935211122036, 0.07001912593841553, -0.029126791283488274, 0.01768539659678936, -0.0031917819287627935, 0.03323373198509216, 0.08493667840957642, 0.06810230016708374, -0.10994671285152435, 
-0.08850465714931488, -0.0864754468202591, -0.062490351498126984, -0.03447577729821205, 0.056573349982500076, 0.07378017902374268, 0.06580189615488052, 0.16862694919109344, -0.0005548892077058554, 0.005788953509181738, -0.07531256973743439, 0.02114880084991455, 0.1744067370891571, -0.09364598989486694, -0.08349609375, -0.09049136191606522, -0.0029291193932294846, 0.09400572627782822, 0.06329739093780518, -0.0548616461455822, -0.024275194853544235, 0.037011489272117615, 0.06969466060400009, 0.1555967479944229, 0.12228161096572876, -0.003742769593372941, -0.08228208124637604, -0.048118703067302704, 0.16659203171730042, -0.13598115742206573, 0.0033598935697227716, 0.013743046671152115, -0.06697069853544235, -0.05760418623685837, -0.011579235084354877, 0.004749522544443607, -0.05802354961633682, 0.08218017220497131, -0.06362786889076233, -0.04596565663814545, -0.083063505589962, -0.0684540644288063, 0.08365601301193237, -0.06790696084499359, -0.0020750192925333977, -0.007743047084659338, -0.1658775955438614, -0.0364471934735775, 0.037609562277793884, -0.10754745453596115, -0.07524935901165009, -0.029470039531588554, 0.003922081086784601, 0.008057908155024052, -0.06093813106417656, 0.14348062872886658, -0.06149641051888466, 0.07377603650093079, -0.038128796964883804, 0.070264533162117, 0.07147649675607681, 0.0356515496969223, -0.09809432178735733, 0.08067943900823593, -0.0856197327375412, 0.0871007889509201, -0.10541194677352905, -0.04167390987277031, -0.17389336228370667, -0.05556480213999748, 0.058056075125932693, 0.005124279763549566, -0.0322038009762764, 0.15786194801330566, -0.1933104693889618, 0.013416864909231663, 0.16153815388679504, -0.1221611425280571, -0.13163845241069794, 0.06146198883652687, -0.10501564294099808, 0.09334293752908707, 0.04285242408514023, 0.03765387088060379, 0.03615404665470123, -0.07198930531740189, -0.08118181675672531, -0.06932874768972397, -0.03143133223056793, 0.11901089549064636, 0.07812768965959549, -0.038906119763851166, 
-0.006061446852982044, 0.005147600080817938, 0.041747644543647766, -0.1187840923666954, -0.03508825972676277, -0.10631685703992844, -0.022532694041728973, -0.055868182331323624, 0.027024762704968452, -0.007970322854816914, -0.13841485977172852, -0.02696978859603405, -0.08881731331348419, -0.02611265890300274, 0.0683046504855156, -0.023014262318611145, -0.032666224986314774, -0.050080787390470505, 0.08811536431312561, -0.06204160675406456, -0.0021088095381855965, -0.17163635790348053, -0.0780714824795723, -0.0019496902823448181, -0.12484267354011536, 0.04482617974281311, -0.02157217264175415, -0.0015320879174396396, 0.024096792563796043, -0.03648797422647476, 0.018886692821979523, 0.08522745221853256, 0.019642753526568413, 0.035498809069395065, -0.196628600358963, -0.02656560391187668, -0.08866303414106369, 0.10184517502784729, -0.20759117603302002, 0.013747422024607658, 0.1331479251384735, 0.11667529493570328, 0.0061835334636271, -0.053499430418014526, 0.11413617432117462, -0.02272549457848072, -0.053345050662755966, -0.06468462198972702, 0.026800954714417458, -0.06584332883358002, -0.013338473625481129, 0.02208191342651844, -0.20203477144241333, -0.07357744872570038, 0.11917761713266373, 0.05796968564391136, -0.11674179881811142, -0.18663983047008514, -0.04360160976648331, -0.029816562309861183, -0.0771046057343483, -0.032228630036115646, -0.009250624105334282, 0.038151245564222336, 0.013963179662823677, -0.11170481890439987, -0.05930870398879051, 0.03753786161541939, -0.0032708982471376657, -0.08123735338449478, 0.07034216821193695, 0.07219453901052475, -0.16661885380744934, 0.1544291377067566, 0.02459692396223545, 0.15249836444854736, 0.05236130952835083, 0.02910313382744789, -0.10687243193387985, -0.003284899052232504, 0.0368906669318676, 0.007810608949512243, 0.14636372029781342, -0.05795472115278244, 0.05143653601408005, 0.09586719423532486, -0.046540021896362305, -0.0033584071788936853, -0.07896757870912552, -0.00292202434502542, 0.02282949909567833, 
-0.025976115837693214, -0.07243786752223969, -0.03806864470243454, 0.020917467772960663, 0.12756934762001038, 0.04665278643369675, 0.09014569967985153, 0.04768356308341026, -0.042445484548807144, -0.11655405163764954, 0.15046828985214233, -0.0663347989320755, -0.27987977862358093, -0.09213891625404358, -0.15668588876724243, 0.0026228236965835094, -0.0002053034695563838, 0.04674670845270157, -0.0014678710140287876, -0.09068931639194489, -0.08856036514043808, 0.019469691440463066, -0.002978171221911907, -0.14461064338684082, -0.11038630455732346, 0.016582177951931953, 0.024680515751242638, -0.10369550436735153, 0.042323291301727295, 0.058498822152614594, -0.013857671990990639, 0.04138798266649246, 0.07394252717494965, 0.17408615350723267, 0.1095517948269844, 0.016551319509744644, -0.011534024029970169, -0.02636626735329628, 0.22048643231391907, -0.12298805266618729, 0.04242166131734848, 0.11735091358423233, -0.09191753715276718, 0.0852767825126648, 0.24133837223052979, 0.02426740899682045, -0.10329335927963257, 0.013360339216887951, 0.07184069603681564, -0.06494739651679993, -0.2168896198272705, -0.09725654125213623, -0.02726377360522747, -0.06353669613599777, 0.13771623373031616, 0.0658821240067482, 0.06569377332925797, 0.10487405955791473, -0.14135634899139404, -0.07025887072086334, -0.001293723937124014, 0.1069253608584404, 0.012236167676746845, -0.0031113456934690475, 0.06986989080905914, -0.021477151662111282, -0.002758954418823123, 0.10845231264829636, 0.09377209097146988, 0.23434598743915558, -0.009687232784926891, 0.17432454228401184, 0.112513966858387, 0.1074807345867157, 0.008898037485778332, 0.0459112673997879, -0.000536871375516057, 0.07111731916666031, -0.05114373564720154, -0.08135142177343369, -0.052351467311382294, 0.09022720903158188, 0.03008229471743107, 0.02582869492471218, 0.007461265195161104, -0.12549374997615814, 0.045920033007860184, 0.08571504056453705, 0.04199519753456116, -0.2176574170589447, 0.006254988256841898, 0.10183800011873245, 
0.005896646995097399, -0.14760084450244904, -0.004402214661240578, 0.030156852677464485, -0.15221315622329712, 0.09557132422924042, -0.04644684121012688, 0.11425463110208511, -0.14161019027233124, -0.03694430738687515, -0.07688193023204803, 0.01616855524480343, -0.058841586112976074, 0.17248395085334778, -0.22961507737636566, 0.18067078292369843, 0.028473425656557083, 0.03463932126760483, -0.14699313044548035, 0.02543802000582218, -0.007703389972448349, -0.03054971992969513, 0.22957342863082886, -0.01938932202756405, -0.11073271930217743, -0.07724873721599579, -0.1053021103143692, -0.008434874005615711, 0.07119039446115494, -0.05074722319841385, 0.11830257624387741, 0.01241130381822586, -0.0042715915478765965, -0.022164829075336456, -0.11537797749042511, -0.13657572865486145, -0.24551646411418915, 0.06471220403909683, -0.10537919402122498, 0.010301818139851093, -0.031800393015146255, -0.03632088378071785, 0.04851237311959267, 0.13535690307617188, -0.15626473724842072, -0.11047632992267609, -0.16003349423408508, 0.03162528574466705, 0.11888782680034637, -0.027766751125454903, 0.044484078884124756, 0.00036935057141818106, 0.21589821577072144, -0.020383279770612717, -0.032629113644361496, 0.022175999358296394, -0.0554337352514267, -0.16109584271907806, -0.06320400536060333, 0.11442632973194122, 0.08304296433925629, 0.04978165775537491, 0.017787059769034386, 0.053934574127197266, 0.003878477029502392, -0.1104343831539154, 0.08308431506156921, 0.08327805250883102, 0.20008181035518646, 0.09933704137802124, -0.012864740565419197, -0.17245203256607056, -0.13361091911792755, -0.047003746032714844, 0.11035556346178055, 0.22219404578208923, -0.04970895126461983, 0.1681893765926361, 0.15544220805168152, -0.0867634117603302, -0.22143873572349548, 0.019805220887064934, -0.05573005601763725, -0.046988606452941895, 0.0472070686519146, -0.21148261427879333, 0.012673061341047287, 0.025648225098848343, -0.03335797041654587, 0.16201242804527283, -0.1479925811290741, 
-0.10676304996013641, 0.08806756138801575, 0.1116725280880928, -0.1628931313753128, -0.13353268802165985, -0.11930481344461441, -0.06854643672704697, -0.12218685448169708, 0.20802833139896393, -0.03393654152750969, -0.0019240167457610369, -0.011691785417497158, 0.07938166707754135, 0.03563496842980385, -0.06559523195028305, 0.18504053354263306, -0.02819136530160904, 0.06327532976865768, -0.11479561775922775, -0.08004513382911682, 0.009419838897883892, -0.050009068101644516, 0.10174044966697693, -0.0184676181524992, 0.0025846504140645266, -0.09153643995523453, -0.08405964076519012, -0.05816395953297615, 0.041940607130527496, -0.07301881909370422, -0.10008954256772995, -0.09972129017114639, 0.07101812213659286, 0.10912472009658813, -0.037684857845306396, -0.04455266520380974, -0.10615118592977524, 0.04547824710607529, 0.18798980116844177, 0.21099112927913666, 0.05441680923104286, -0.05270736292004585, -0.029242245480418205, -0.05649366229772568, 0.04089951142668724, -0.16854724287986755, 0.027913745492696762, 0.07870364934206009, 0.042047493159770966, 0.13367827236652374, -0.020897381007671356, -0.16421911120414734, -0.0062937187030911446, 0.04474855586886406, -0.10340121388435364, -0.1996934562921524, 0.028227072209119797, 0.07734331488609314, -0.19811414182186127, -0.10042684525251389, 0.021209653466939926, 0.009532895870506763, -0.037424203008413315, 0.0055816443637013435, 0.10451291501522064, 0.05196935683488846, 0.06854663789272308, 0.10219012945890427, 0.09635642915964127, -0.1332344263792038, 0.05610734224319458, 0.10705510526895523, -0.1547919511795044, 0.06279706209897995, -0.022028394043445587, -0.05008110776543617, -0.05082961544394493, -0.003672907594591379, 0.09230206161737442, 0.007192645221948624, -0.08851578831672668, 0.058265574276447296, -0.09461028128862381, 0.04709829017519951, 0.1586991846561432, 0.009623306803405285, 0.006439116783440113, 0.05509781837463379, -0.026338567957282066, -0.14387385547161102, 0.13013698160648346, 0.02594594471156597, 
0.04672935605049133, -0.03439018502831459, -0.047196682542562485, 0.04134044423699379, 0.03224373236298561, -0.0403311625123024, -0.023881662636995316, -0.033670417964458466, -0.002336320700123906, -0.19716788828372955, 0.075133316218853, -0.09778637439012527, 0.004976307973265648, -0.010060247965157032, -0.024967487901449203, -0.01595233753323555, -0.0009088207734748721, -0.0643581822514534, -0.007871215231716633, 0.008185098879039288, 0.07373857498168945, -0.22904253005981445, -0.032123710960149765, 0.10283848643302917, -0.0668208971619606, 0.07784179598093033, -0.03228393569588661, -0.06781799346208572, -0.005834529642015696, -0.1414446234703064, -0.013206074014306068, 0.011844679713249207, 0.03611711785197258, 0.03815842047333717, -0.18241731822490692, 0.001921891700476408, 0.026736345142126083, -0.09501869231462479, 0.026276685297489166, -0.0037942840717732906, -0.11023249477148056, 0.05527083948254585, 0.01327609084546566, -0.04645448550581932, -0.00816764310002327, 0.08596232533454895, 0.10982909053564072, -0.06038665771484375, 0.13376328349113464, 0.008027713745832443, 0.060735780745744705, -0.13897259533405304, -0.02884409762918949, -0.042513035237789154, -0.036519624292850494, -0.02062707021832466, 0.04945720359683037, 0.04667249321937561, -0.0246462170034647, 0.34044820070266724, -0.026327745988965034, -0.032267503440380096, 0.031958650797605515, 0.021428270265460014, -0.0015269764699041843, 0.038731250911951065, 0.12376771122217178, -0.0022587324492633343, -0.025246839970350266, 0.0424770824611187, 0.004177299328148365, -0.024945005774497986, -0.019959790632128716, 0.12274960428476334, 0.16207042336463928, 0.17013563215732574, -0.07579728215932846, 0.030480869114398956, -0.14412783086299896, 0.02930755354464054, 0.06938403099775314, 0.04170135781168938, 0.010978822596371174, 0.012322459369897842, 0.08170638233423233, 0.14693978428840637, -0.13385778665542603, 0.07766985148191452, -0.06612381339073181, -0.08801031112670898, -0.10776570439338684, 
-0.17813287675380707, -0.06771975755691528, 0.027990156784653664, 0.00697934627532959, -0.10769613087177277, 0.057141177356243134, 0.17983262240886688, -0.0198719073086977, -0.011329867877066135, 0.07762133330106735, -0.012629049830138683, 0.022223634645342827, 0.006615813821554184, 0.05557025223970413, -0.00436778087168932, -0.01684851571917534, 0.0476214773952961, 0.05154561996459961, 0.03823886811733246, 0.08023256063461304, 0.0573124922811985, 0.06331434845924377, 0.02545194886624813, -0.0872940719127655, -0.08784031122922897, 0.0012072141980752349, 0.025641098618507385, 0.005108901299536228, 0.19227181375026703, 0.019968969747424126, 0.03417821228504181, -0.00324063072912395, 0.18674969673156738, 0.006189278792589903, -0.026209011673927307, -0.09535994380712509, 0.1340039223432541, -0.04426201432943344, 0.02782554365694523, 0.04789311811327934, -0.1230020597577095, 0.08424301445484161, 0.1149173453450203, 0.19348587095737457, -0.042289987206459045, -0.017477447167038918, -0.03336693346500397, 0.028903169557452202, 0.015338397584855556, 0.05017773061990738, 0.021514931693673134, 0.11186915636062622, -0.09036347270011902, 0.06029686704277992, -0.019170187413692474, -0.019879905506968498, -0.01768581010401249, 0.13625779747962952, 0.06227348744869232, 0.011329368688166142, -0.09565792977809906, 0.11864724010229111, -0.08884361386299133, -0.1568283587694168, 0.019207997247576714, -0.04839545115828514, -0.15044184029102325, -0.002593002747744322, -0.036523036658763885, 0.012575212866067886, -0.022335601970553398, 0.020625922828912735, -0.010066535323858261, 0.12391169369220734, 0.046448905020952225, -0.14777137339115143, -0.03359858691692352, 0.09517894685268402, -0.025122877210378647, 0.32199496030807495, 0.01896335370838642, 0.04964707791805267, 0.0965663343667984, -0.09443532675504684, -0.1571168750524521, 0.030515572056174278, 0.09792692959308624, 0.071408711373806, 0.08876733481884003, 0.2145536243915558, 0.00008815753244562075, 0.051584478467702866, 
0.08924662321805954, -0.04852227866649628, 0.021618209779262543, -0.03762529790401459, -0.05682588741183281, -0.08188659697771072, 0.08487740159034729, -0.07417124509811401, 0.11504659801721573, 0.07239633798599243, -0.047641340643167496, 0.017316296696662903, -0.029349245131015778, 0.0699370801448822, -0.0003027287020813674, 0.15188071131706238, 0.07300716638565063, -0.13998641073703766, 0.0016997330822050571, 0.035709913820028305, 0.06756433099508286, -0.3328336775302887, -0.020278848707675934, 0.03241622820496559, -0.00092127057723701, -0.031663648784160614, 0.0964314267039299, 0.01917295716702938, 0.059096843004226685, -0.08434086292982101, -0.03127289190888405, -0.010820526629686356, 0.1418859213590622, -0.0762174129486084, 0.01715315878391266 ]
426f628f6b23751c502fcd61b509e5ecdc53dbe9
# ChaosBench We propose ChaosBench, a large-scale, multi-channel, physics-based benchmark for subseasonal-to-seasonal (S2S) climate prediction. It is framed as a high-dimensional video regression task that consists of 45-year, 60-channel observations for validating physics-based and data-driven models, and training the latter. Physics-based forecasts are generated from 4 national weather agencies with 44-day lead-time and serve as baselines to data-driven forecasts. Our benchmark is one of the first to incorporate physics-based metrics to ensure physically-consistent and explainable models. We establish two tasks: full and sparse dynamics prediction. 🔗: [https://leap-stc.github.io/ChaosBench/](https://leap-stc.github.io/ChaosBench/) 📚: [https://arxiv.org/abs/2402.00712](https://arxiv.org/abs/2402.00712) ## Getting Started **Step 1**: Clone the [ChaosBench](https://github.com/leap-stc/ChaosBench) Github repository **Step 2**: Create local directory to store your data, e.g., ``` cd ChaosBench mkdir data ``` **Step 3**: Navigate to `chaosbench/config.py` and change the field `DATA_DIR = ChaosBench/data` **Step 4**: Initialize the space by running ``` cd ChaosBench/data/ wget https://huggingface.co/datasets/LEAP/ChaosBench/resolve/main/process.sh chmod +x process.sh ``` **Step 5**: Download the data ``` # NOTE: you can also run each line one at a time to retrieve individual dataset ./process.sh era5 # Required: For input ERA5 data ./process.sh climatology # Required: For climatology ./process.sh ukmo # Optional: For simulation from UKMO ./process.sh ncep # Optional: For simulation from NCEP ./process.sh cma # Optional: For simulation from CMA ./process.sh ecmwf # Optional: For simulation from ECMWF ``` ## Dataset Overview - __Input:__ ERA5 Reanalysis (1979-2023) - __Target:__ The following table indicates the 48 variables (channels) that are available for Physics-based models. 
Note that the __Input__ ERA5 observations contains __ALL__ fields, including the unchecked boxes: Parameters/Levels (hPa) | 1000 | 925 | 850 | 700 | 500 | 300 | 200 | 100 | 50 | 10 :---------------------- | :----| :---| :---| :---| :---| :---| :---| :---| :--| :-| Geopotential height, z ($gpm$) | &check; | &check; | &check; | &check; | &check; | &check; | &check; | &check; | &check; | &check; | Specific humidity, q ($kg kg^{-1}$) | &check; | &check; | &check; | &check; | &check; | &check; | &check; | &nbsp; | &nbsp; | &nbsp; | Temperature, t ($K$) | &check; | &check; | &check; | &check; | &check; | &check; | &check; | &check; | &check; | &check; | &check; | U component of wind, u ($ms^{-1}$) | &check; | &check; | &check; | &check; | &check; | &check; | &check; | &check; | &check; | &check; | V component of wind, v ($ms^{-1}$) | &check; | &check; | &check; | &check; | &check; | &check; | &check; | &check; | &check; | &check; | Vertical velocity, w ($Pas^{-1}$) | &nbsp; | &nbsp; | &nbsp; | &nbsp; | &check; | &nbsp; | &nbsp; | &nbsp; | &nbsp; | &nbsp; | - __Baselines:__ - Physics-based models: - [x] UKMO: UK Meteorological Office - [x] NCEP: National Centers for Environmental Prediction - [x] CMA: China Meteorological Administration - [x] ECMWF: European Centre for Medium-Range Weather Forecasts - Data-driven models: - [x] Lagged-Autoencoder - [x] Fourier Neural Operator (FNO) - [x] ResNet - [x] UNet - [x] ViT/ClimaX - [x] PanguWeather - [x] Fourcastnetv2 - [x] GraphCast ## Evaluation Metrics We divide our metrics into 2 classes: (1) ML-based, which cover evaluation used in conventional computer vision and forecasting tasks, (2) Physics-based, which are aimed to construct a more physically-faithful and explainable data-driven forecast. 
- __Vision-based:__ - [x] RMSE - [x] Bias - [x] Anomaly Correlation Coefficient (ACC) - [x] Multiscale Structural Similarity Index (MS-SSIM) - __Physics-based:__ - [x] Spectral Divergence (SpecDiv) - [x] Spectral Residual (SpecRes) ## Leaderboard You can access the full score and checkpoints in `logs/<MODEL_NAME>` within the following subdirectory: - Scores: `eval/<METRIC>.csv` - Model checkpoints: `lightning_logs/`
LEAP/ChaosBench
[ "license:gpl-3.0", "arxiv:2402.00712", "region:us" ]
2024-01-25T04:17:24+00:00
{"license": "gpl-3.0", "viewer": false}
2024-02-12T23:27:43+00:00
[ "2402.00712" ]
[]
TAGS #license-gpl-3.0 #arxiv-2402.00712 #region-us
ChaosBench ========== We propose ChaosBench, a large-scale, multi-channel, physics-based benchmark for subseasonal-to-seasonal (S2S) climate prediction. It is framed as a high-dimensional video regression task that consists of 45-year, 60-channel observations for validating physics-based and data-driven models, and training the latter. Physics-based forecasts are generated from 4 national weather agencies with 44-day lead-time and serve as baselines to data-driven forecasts. Our benchmark is one of the first to incorporate physics-based metrics to ensure physically-consistent and explainable models. We establish two tasks: full and sparse dynamics prediction. : URL : URL Getting Started --------------- Step 1: Clone the ChaosBench Github repository Step 2: Create local directory to store your data, e.g., Step 3: Navigate to 'chaosbench/URL' and change the field 'DATA\_DIR = ChaosBench/data' Step 4: Initialize the space by running Step 5: Download the data Dataset Overview ---------------- * **Input:** ERA5 Reanalysis (1979-2023) * **Target:** The following table indicates the 48 variables (channels) that are available for Physics-based models. Note that the **Input** ERA5 observations contains **ALL** fields, including the unchecked boxes: * **Baselines:** + Physics-based models: - [x] UKMO: UK Meteorological Office - [x] NCEP: National Centers for Environmental Prediction - [x] CMA: China Meteorological Administration - [x] ECMWF: European Centre for Medium-Range Weather Forecasts + Data-driven models: - [x] Lagged-Autoencoder - [x] Fourier Neural Operator (FNO) - [x] ResNet - [x] UNet - [x] ViT/ClimaX - [x] PanguWeather - [x] Fourcastnetv2 - [x] GraphCast Evaluation Metrics ------------------ We divide our metrics into 2 classes: (1) ML-based, which cover evaluation used in conventional computer vision and forecasting tasks, (2) Physics-based, which are aimed to construct a more physically-faithful and explainable data-driven forecast. 
* **Vision-based:** + [x] RMSE + [x] Bias + [x] Anomaly Correlation Coefficient (ACC) + [x] Multiscale Structural Similarity Index (MS-SSIM) * **Physics-based:** + [x] Spectral Divergence (SpecDiv) + [x] Spectral Residual (SpecRes) Leaderboard ----------- You can access the full score and checkpoints in 'logs/<MODEL\_NAME>' within the following subdirectory: * Scores: 'eval/.csv' * Model checkpoints: 'lightning\_logs/'
[]
[ "TAGS\n#license-gpl-3.0 #arxiv-2402.00712 #region-us \n" ]
[ 22 ]
[ "passage: TAGS\n#license-gpl-3.0 #arxiv-2402.00712 #region-us \n" ]
[ -0.006069590337574482, 0.09344310313463211, -0.006409067194908857, 0.02137855440378189, 0.010997242294251919, 0.06064131483435631, 0.18903130292892456, 0.062380317598581314, 0.1337033361196518, -0.0457453653216362, 0.1919642984867096, 0.09846936911344528, 0.021265454590320587, 0.02173786610364914, 0.019494613632559776, -0.11002377420663834, 0.041081055998802185, -0.02665954828262329, 0.10586942732334137, 0.04148401692509651, 0.025091558694839478, -0.012416806071996689, 0.03773963451385498, -0.005652275867760181, -0.11197645962238312, 0.014544349163770676, 0.0847686380147934, -0.05274668708443642, 0.09737758338451385, 0.006699209101498127, 0.04481489956378937, 0.05044514313340187, 0.045748021453619, -0.20468765497207642, -0.013406604528427124, -0.07548311352729797, -0.12610243260860443, 0.04665539786219597, 0.0480252243578434, 0.06191018596291542, 0.1142578050494194, 0.10777585208415985, -0.07111845910549164, 0.030476579442620277, -0.19656944274902344, -0.25484928488731384, -0.16378666460514069, 0.09602154046297073, 0.015210416167974472, 0.0912587121129036, 0.10099916905164719, 0.15038332343101501, -0.06639520078897476, 0.0086684450507164, 0.19378939270973206, -0.4055575728416443, 0.06203387305140495, 0.23587186634540558, 0.02761208266019821, 0.008770712651312351, -0.014973876997828484, 0.09590839594602585, 0.08959324657917023, -0.01909572258591652, -0.09369576722383499, -0.06839419156312943, -0.0576583594083786, 0.13108207285404205, 0.0036054232623428106, -0.09579948335886002, 0.2822227478027344, 0.039863839745521545, -0.0483941026031971, 0.09291216731071472, 0.022992301732301712, -0.08252307027578354, 0.021854043006896973, 0.06409668922424316, 0.060608528554439545, 0.1237013190984726, 0.10108856111764908, -0.03948051109910011, -0.16381260752677917, -0.09876909852027893, -0.20526933670043945, 0.116483174264431, -0.032235659658908844, 0.13514140248298645, -0.11084000021219254, 0.026940394192934036, -0.16378094255924225, 0.015753477811813354, -0.062260329723358154, 
-0.06380434334278107, 0.15429426729679108, 0.057502057403326035, -0.03879402205348015, 0.1506654918193817, 0.09281767904758453, 0.17435874044895172, -0.016531195491552353, 0.008099802769720554, 0.001079247915185988, 0.1299741566181183, -0.05099494755268097, 0.04833288490772247, 0.11723405867815018, 0.1657063066959381, 0.038906849920749664, -0.1504412591457367, 0.0931963250041008, -0.0221693255007267, -0.17426513135433197, -0.025908516719937325, -0.11906673014163971, 0.13704420626163483, -0.007050731219351292, -0.0826987698674202, -0.07965778559446335, 0.06780512630939484, 0.14997662603855133, 0.0028768565971404314, -0.004995201714336872, -0.011509322561323643, 0.011365228332579136, -0.11332972347736359, -0.007237865589559078, 0.021567445248365402, 0.10021666437387466, 0.0569443516433239, -0.12973427772521973, 0.01654829829931259, 0.021272407844662666, 0.024081647396087646, 0.13953572511672974, -0.058644697070121765, 0.029452519491314888, -0.13276292383670807, -0.14104704558849335, 0.026779942214488983, -0.0018583368510007858, -0.03180253133177757, 0.047483913600444794, 0.11042041331529617, 0.050894446671009064, 0.00021552924590650946, -0.07913704216480255, -0.11377303302288055, -0.07654628157615662, 0.09128271788358688, -0.01641068607568741, 0.015069553628563881, -0.2700490951538086, -0.024579385295510292, -0.10315985232591629, 0.019518809393048286, 0.0794394314289093, -0.13421593606472015, -0.1406085342168808, 0.12828651070594788, -0.03530075401067734, -0.006723486352711916, -0.05676986649632454, 0.024043472483754158, 0.01984570361673832, 0.06731604039669037, -0.06121943145990372, -0.006836276967078447, 0.08852366358041763, -0.12452474236488342, -0.136862114071846, -0.03270566463470459, 0.04500633478164673, -0.007675456814467907, 0.048014428466558456, 0.3220864534378052, -0.09477582573890686, -0.14858946204185486, 0.023447344079613686, 0.16569222509860992, -0.1419481635093689, -0.2927670180797577, 0.1583244502544403, -0.15885727107524872, -0.19223740696907043, 
0.030615393072366714, -0.07611273229122162, 0.06968764215707779, -0.02425788715481758, -0.07112624496221542, -0.005898640025407076, -0.002532435115426779, -0.012463347055017948, -0.011668553575873375, 0.060650162398815155, -0.0756903886795044, 0.03357357904314995, -0.0334126353263855, 0.009430586360394955, 0.12103594094514847, 0.022885657846927643, -0.066269151866436, 0.03878236189484596, -0.01286366581916809, -0.03890641778707504, -0.013160020112991333, -0.1100722998380661, 0.004654583055526018, -0.021613886579871178, 0.1057988852262497, 0.07398749142885208, 0.041751928627491, -0.04417942091822624, 0.01180283259600401, 0.026241431012749672, 0.02483249083161354, 0.039946261793375015, 0.03742695599794388, -0.04028228297829628, 0.044406790286302567, -0.002096340525895357, -0.06753440946340561, -0.057650335133075714, -0.058899614959955215, 0.09979470074176788, -0.10009931027889252, -0.030674029141664505, 0.025808164849877357, -0.010205413214862347, -0.044147372245788574, 0.07477088272571564, -0.0013548462884500623, 0.1530485451221466, 0.038643546402454376, -0.05597811937332153, 0.21129101514816284, -0.009901032783091068, 0.2866297960281372, 0.15512865781784058, -0.07598979026079178, -0.012863542884588242, -0.09653817117214203, -0.0004948099376633763, 0.012917452491819859, 0.07904115319252014, 0.008724943734705448, -0.04874264448881149, -0.03343027085065842, 0.027645809575915337, -0.05510983243584633, 0.06803224235773087, -0.012355944141745567, -0.10175958275794983, -0.0892195776104927, 0.03899996727705002, 0.19694110751152039, -0.15195539593696594, 0.12848958373069763, 0.3233242630958557, 0.06793710589408875, 0.07062181085348129, -0.049192700535058975, -0.0012723287800326943, -0.10140521079301834, 0.0164643544703722, -0.01462018396705389, 0.14596471190452576, 0.003842130769044161, -0.0002370373549638316, 0.04722357541322708, 0.03609926626086235, 0.07403985410928726, -0.20491303503513336, -0.17090891301631927, -0.011870107613503933, -0.05170787125825882, 
-0.1899663805961609, 0.08301481604576111, -0.1198904886841774, 0.04049666225910187, 0.045991454273462296, -0.06599627435207367, 0.16138100624084473, -0.0007744339527562261, -0.06077214702963829, 0.09152770787477493, -0.1734369695186615, -0.12296108156442642, -0.2376421093940735, -0.12598401308059692, -0.028044309467077255, 0.032447464764118195, 0.05780660733580589, -0.08739054203033447, -0.04762697219848633, 0.027562079951167107, -0.08632923662662506, -0.12482284754514694, -0.021621333435177803, 0.05951426550745964, 0.05151180177927017, -0.0026924253907054663, -0.11627645045518875, -0.07711819559335709, -0.05856318026781082, -0.028634410351514816, 0.04812276363372803, -0.06949388980865479, 0.10004964470863342, 0.0727771744132042, 0.014924739487469196, 0.05391653999686241, -0.038266971707344055, 0.12638762593269348, 0.018474137410521507, -0.1398984044790268, 0.15068213641643524, 0.05103401839733124, 0.04420512914657593, 0.09414511173963547, 0.1160510703921318, -0.1272447258234024, -0.015044702216982841, -0.06944860517978668, -0.13283683359622955, -0.2671384811401367, -0.04225166514515877, -0.05938207730650902, 0.10568881779909134, 0.04112754389643669, 0.12008270621299744, 0.08509686589241028, 0.0559706874191761, 0.09546919912099838, 0.017850812524557114, -0.03741317614912987, 0.04707177355885506, 0.2642659544944763, -0.05305059626698494, -0.005574636161327362, -0.154697448015213, 0.06320696324110031, 0.15320025384426117, 0.13553577661514282, 0.17898069322109222, 0.2694076597690582, 0.08926902711391449, 0.13733409345149994, 0.13240382075309753, 0.08147801458835602, 0.03539488837122917, 0.04686462879180908, -0.05477992817759514, -0.017397308722138405, -0.036215223371982574, 0.006339398678392172, 0.07213708013296127, -0.03159749135375023, -0.15979263186454773, 0.036825504153966904, -0.23541903495788574, -0.02853390760719776, -0.0847424641251564, 0.10427100956439972, -0.11082550138235092, 0.08364254981279373, 0.05076687037944794, 0.10960989445447922, 
-0.018582671880722046, 0.11964336037635803, -0.08442090451717377, -0.041027821600437164, 0.03606424108147621, 0.01471842359751463, 0.016673436388373375, 0.02732170559465885, 0.021632028743624687, -0.039898913353681564, -0.12647387385368347, 0.0384940467774868, 0.13429366052150726, -0.17563265562057495, 0.30412307381629944, 0.032803621143102646, -0.0732181966304779, -0.008614916354417801, -0.0443551167845726, 0.002798189874738455, 0.09647638350725174, 0.15158817172050476, 0.07013341039419174, -0.16544191539287567, -0.1717018485069275, -0.04020378738641739, 0.0069640944711863995, 0.07619845122098923, 0.0753999650478363, -0.16363941133022308, -0.04473279044032097, 0.06036536395549774, 0.003206087974831462, 0.09327130019664764, -0.0855146199464798, -0.04279948025941849, 0.046117398887872696, 0.11838256567716599, 0.022924214601516724, -0.07001089304685593, 0.031098391860723495, -0.0663483589887619, 0.1000213548541069, -0.08463294059038162, 0.026647120714187622, -0.0563635490834713, -0.17743664979934692, 0.045429110527038574, -0.04897017031908035, 0.004422619007527828, -0.047159306704998016, -0.1644982248544693, -0.08853386342525482, -0.1772942841053009, 0.14176788926124573, -0.05302518978714943, 0.019701898097991943, -0.024762995541095734, 0.13063427805900574, -0.07495690137147903, 0.02385064586997032, -0.013122699223458767, 0.06435005366802216, -0.04034735634922981, -0.1523943990468979, 0.1519630253314972, -0.054091792553663254, 0.056303005665540695, -0.019428541883826256, 0.020619111135601997, 0.09284250438213348, 0.059117939323186874, -0.0928477868437767, 0.18696321547031403, 0.30709096789360046, -0.06928541511297226, 0.2048870176076889, 0.25772911310195923, -0.10504318028688431, -0.21627187728881836, -0.10575219243764877, -0.21229985356330872, -0.08748410642147064, 0.07796832174062729, -0.12027391791343689, 0.012616215273737907, 0.21772511303424835, -0.12595732510089874, 0.2496873438358307, -0.21235767006874084, -0.05817436799407005, 0.11999470740556717, 
0.005005343817174435, 0.39379534125328064, -0.13052232563495636, -0.12325579673051834, -0.04473181813955307, -0.2695811688899994, 0.14185436069965363, 0.05943354219198227, 0.06815366446971893, -0.05135541781783104, -0.0281148049980402, -0.02063767798244953, -0.049710698425769806, 0.21080376207828522, -0.02354178950190544, 0.09640085697174072, -0.09415912628173828, -0.14697431027889252, 0.19633784890174866, 0.0007854175637476146, 0.06825049221515656, -0.06580302119255066, -0.0181739442050457, -0.0736912414431572, 0.018782583996653557, -0.049341972917318344, 0.05603402480483055, 0.015874337404966354, -0.07739712297916412, -0.08743198215961456, -0.002643972635269165, -0.1473468691110611, -0.04455040022730827, 0.2891409397125244, -0.012590587139129639, 0.027738071978092194, 0.06296105682849884, -0.06375663727521896, -0.14180105924606323, -0.06979939341545105, -0.06386282294988632, -0.08953160792589188, 0.07312382757663727, -0.19431431591510773, -0.03408604487776756, 0.08950793743133545, 0.011032710783183575, 0.0501045361161232, 0.07074721157550812, -0.06780833005905151, 0.031113002449274063, 0.1576135754585266, -0.1242477148771286, -0.006013736594468355, 0.03683760017156601, 0.07120272517204285, 0.1816435158252716, 0.06687675416469574, 0.06246878206729889, -0.003926302306354046, 0.017358573153614998, 0.009618000127375126, 0.029353804886341095, -0.12970486283302307, -0.014845476485788822, 0.06339187175035477, -0.03294039145112038, -0.11228539049625397, 0.17326828837394714, 0.021467596292495728, 0.042097754776477814, 0.0028120577335357666, 0.05720365792512894, -0.04326057434082031, -0.05565632879734039, -0.18864689767360687, -0.04891868680715561, -0.2327180653810501, -0.13419868052005768, -0.0025718819815665483, -0.060076501220464706, -0.016454387456178665, 0.014043271541595459, 0.05472021549940109, 0.13126668334007263, 0.01998680830001831, -0.019946621730923653, 0.08498340100049973, -0.12419957667589188, -0.2165672928094864, 0.025512157008051872, -0.11370352655649185, 
-0.11238124966621399, 0.03320303186774254, 0.03842826187610626, -0.03791753947734833, -0.04231986030936241, -0.16152562201023102, 0.08045213669538498, -0.05435207113623619, 0.006143098697066307, -0.08239347487688065, 0.010461012832820415, 0.036785248667001724, -0.017942160367965698, -0.0444803461432457, 0.05431593209505081, -0.14688704907894135, -0.013789170421659946, 0.037639208137989044, 0.03389959782361984, -0.08594314754009247, -0.026951732113957405, 0.06145910918712616, 0.11846159398555756, 0.08546064794063568, 0.08087094873189926, 0.06907939910888672, 0.12022364884614944, -0.1722169667482376, -0.011188439093530178, 0.11047817021608353, -0.030981680378317833, -0.020671315491199493, 0.01477271318435669, -0.03278117626905441, 0.09859438985586166, -0.07384422421455383, 0.05233202502131462, -0.043741825968027115, -0.13929328322410583, -0.08425776660442352, -0.03576647862792015, -0.15631277859210968, 0.006932663265615702, -0.12125636637210846, 0.17764903604984283, 0.021411467343568802, 0.09609920531511307, 0.03181961923837662, -0.04743820056319237, -0.012796448543667793, 0.00622852798551321, -0.0307016558945179, -0.08426329493522644, -0.16659845411777496, -0.03421545773744583, -0.07816791534423828, -0.002768451115116477, 0.3381420969963074, 0.014077562838792801, -0.20963391661643982, 0.031177951022982597, 0.17512930929660797, 0.026460090652108192, -0.03435468673706055, 0.23321034014225006, 0.0181129090487957, -0.002490230370312929, -0.07837443798780441, 0.09457921981811523, -0.05993978679180145, -0.2071632444858551, 0.08679889887571335, 0.014125647023320198, 0.024709047749638557, 0.008333208039402962, 0.1536666303873062, -0.11714989691972733, -0.050972919911146164, -0.04818493127822876, 0.06638403981924057, 0.01807892881333828, -0.008862757124006748, 0.0431116484105587, 0.18317541480064392, -0.0011846732813864946, -0.041276488453149796, -0.05615543946623802, -0.001682290923781693, -0.14788976311683655, -0.16852082312107086, 0.017832644283771515, 
-0.15156692266464233, 0.07798195630311966, 0.0247158482670784, 0.07386747002601624, 0.2686229646205902, 0.02091362327337265, -0.04024294763803482, -0.07309263199567795, -0.14698518812656403, -0.05238876864314079, -0.05559426173567772, -0.0019481832860037684, -0.015618705190718174, -0.11871735751628876, -0.06062033027410507, 0.008861912414431572, -0.17051632702350616, -0.017843889072537422, 0.022202324122190475, 0.06730619817972183, -0.03082367591559887, -0.06127794831991196, -0.021840956062078476, -0.08256470412015915, 0.11850491166114807, 0.013380646705627441, 0.1922147274017334, 0.002363965380936861, 0.02152397856116295, 0.06541979312896729, 0.07504060864448547, -0.025054825469851494, -0.03952908515930176, 0.05656706914305687, 0.11510618031024933, -0.0340772308409214, 0.12248662859201431, -0.08237875998020172, 0.017735185101628304, 0.02840896137058735, 0.14759686589241028, 0.2486150562763214, -0.008233543485403061, 0.005738352425396442, -0.011111770756542683, 0.027162102982401848, 0.06074235960841179, 0.18176570534706116, 0.04011397808790207, 0.24796965718269348, -0.058024100959300995, -0.0473875030875206, -0.046122629195451736, 0.06956000626087189, -0.04548564925789833, 0.03270561993122101, 0.05158774554729462, -0.07551943510770798, -0.0952831357717514, 0.09524384140968323, -0.05652107670903206, 0.19703064858913422, 0.14765134453773499, -0.08922091126441956, 0.03256192430853844, 0.02452469989657402, 0.12563015520572662, -0.018797757104039192, 0.07198867201805115, -0.12426619976758957, -0.07221858203411102, -0.0734105184674263, 0.022166263312101364, -0.3951672911643982, -0.17169548571109772, 0.06154925003647804, 0.17694011330604553, 0.2120596021413803, -0.00019302334112580866, 0.18718552589416504, 0.02349040098488331, 0.058126769959926605, -0.12139002978801727, 0.17338690161705017, 0.009296683594584465, -0.11376041918992996, -0.09153575450181961, -0.23780296742916107, -0.009771681390702724, 0.003507035318762064, 0.024881212040781975, 0.07073181122541428, 
0.017958877608180046, 0.1813250184059143, -0.03868448734283447, -0.03900706768035889, -0.035014305263757706, -0.1361909806728363, 0.08075439184904099, -0.03833466023206711, -0.0003542676568031311, -0.12120182812213898, -0.04765581339597702, 0.00433981092646718, 0.11490780860185623, -0.201996311545372, -0.040807560086250305, 0.12703363597393036, 0.02873135730624199, 0.17414388060569763, -0.02473011426627636, -0.08207273483276367, -0.036312323063611984, -0.09720838814973831, 0.05908244475722313, -0.0836150273680687, 0.004550487734377384, 0.09296626597642899, -0.010909613221883774, 0.01803293637931347, -0.19870401918888092, 0.052928369492292404, -0.011450350284576416, -0.05307558923959732, -0.10359866172075272 ]
b296503d2eebe42f6250b34d3c340dd03a8e75fd
# Danbooru 2023 webp: A space-efficient version of Danbooru 2023 This dataset is a resized/re-encoded version of [danbooru2023](https://huggingface.co/datasets/nyanko7/danbooru2023).<br> Which removed the non-image/truncated files and resize all of them into smaller size. --- ## Details This dataset employs few method to reduce the size and improve the efficiency. ### Size and Format This dataset resize all the image which have more than 2048x2048 pixel into near 2048x2048 pixels with bicubic algorithm.<br> And remove all the image with longer edge larger than 16383 after resize.<br> (one reason is beacuse webp doesn't allow that, another is that aspect ratio is too large/small.) This dataset encode/save all the image with 90% quality webp with pillow library in Python. Which is half size of the 100% quality lossy webp. The total size of this dataset is around 1.3~1.4TB. Which is less than the 20% of original file size. ### Webdataset This dataset use webdataset library to save all the tarfile, therefore, you can also use webdataset to load them easily. This is also a recommended way. The `__key__` of each files is the id of it. You can use this id to query the [metadata database](https://huggingface.co/datasets/KBlueLeaf/danbooru2023-sqlite) easily. --- ## Future work I will open a repo on github for utilizing danbooru-webp and danbooru-sqlite datasets as a dataset exporter for fine-grained-image-task.<br> Since the original danbooru2023 actually doesn't have images published after 2023/11/20, and it may be updated in the future.<br> This dataset will be updated after original dataset is been updated. And maintain the same format.
KBlueLeaf/danbooru2023-webp-2Mpixel
[ "task_categories:image-classification", "task_categories:zero-shot-image-classification", "task_categories:text-to-image", "size_categories:1M<n<10M", "language:en", "license:mit", "art", "anime", "not-for-all-audiences", "region:us" ]
2024-01-25T04:18:45+00:00
{"language": ["en"], "license": "mit", "size_categories": ["1M<n<10M"], "task_categories": ["image-classification", "zero-shot-image-classification", "text-to-image"], "tags": ["art", "anime", "not-for-all-audiences"]}
2024-01-30T18:02:00+00:00
[]
[ "en" ]
TAGS #task_categories-image-classification #task_categories-zero-shot-image-classification #task_categories-text-to-image #size_categories-1M<n<10M #language-English #license-mit #art #anime #not-for-all-audiences #region-us
# Danbooru 2023 webp: A space-efficient version of Danbooru 2023 This dataset is a resized/re-encoded version of danbooru2023.<br> Which removed the non-image/truncated files and resize all of them into smaller size. --- ## Details This dataset employs few method to reduce the size and improve the efficiency. ### Size and Format This dataset resize all the image which have more than 2048x2048 pixel into near 2048x2048 pixels with bicubic algorithm.<br> And remove all the image with longer edge larger than 16383 after resize.<br> (one reason is beacuse webp doesn't allow that, another is that aspect ratio is too large/small.) This dataset encode/save all the image with 90% quality webp with pillow library in Python. Which is half size of the 100% quality lossy webp. The total size of this dataset is around 1.3~1.4TB. Which is less than the 20% of original file size. ### Webdataset This dataset use webdataset library to save all the tarfile, therefore, you can also use webdataset to load them easily. This is also a recommended way. The '__key__' of each files is the id of it. You can use this id to query the metadata database easily. --- ## Future work I will open a repo on github for utilizing danbooru-webp and danbooru-sqlite datasets as a dataset exporter for fine-grained-image-task.<br> Since the original danbooru2023 actually doesn't have images published after 2023/11/20, and it may be updated in the future.<br> This dataset will be updated after original dataset is been updated. And maintain the same format.
[ "# Danbooru 2023 webp: A space-efficient version of Danbooru 2023\n\nThis dataset is a resized/re-encoded version of danbooru2023.<br>\nWhich removed the non-image/truncated files and resize all of them into smaller size.\n\n---", "## Details\nThis dataset employs few method to reduce the size and improve the efficiency.", "### Size and Format\nThis dataset resize all the image which have more than 2048x2048 pixel into near 2048x2048 pixels with bicubic algorithm.<br>\nAnd remove all the image with longer edge larger than 16383 after resize.<br>\n(one reason is beacuse webp doesn't allow that, another is that aspect ratio is too large/small.)\n\nThis dataset encode/save all the image with 90% quality webp with pillow library in Python.\nWhich is half size of the 100% quality lossy webp.\n\nThe total size of this dataset is around 1.3~1.4TB. Which is less than the 20% of original file size.", "### Webdataset\nThis dataset use webdataset library to save all the tarfile, therefore, you can also use webdataset to load them easily. This is also a recommended way.\n\nThe '__key__' of each files is the id of it. You can use this id to query the metadata database easily.\n\n---", "## Future work\nI will open a repo on github for utilizing danbooru-webp and danbooru-sqlite datasets as a dataset exporter for fine-grained-image-task.<br>\nSince the original danbooru2023 actually doesn't have images published after 2023/11/20, and it may be updated in the future.<br>\nThis dataset will be updated after original dataset is been updated. And maintain the same format." ]
[ "TAGS\n#task_categories-image-classification #task_categories-zero-shot-image-classification #task_categories-text-to-image #size_categories-1M<n<10M #language-English #license-mit #art #anime #not-for-all-audiences #region-us \n", "# Danbooru 2023 webp: A space-efficient version of Danbooru 2023\n\nThis dataset is a resized/re-encoded version of danbooru2023.<br>\nWhich removed the non-image/truncated files and resize all of them into smaller size.\n\n---", "## Details\nThis dataset employs few method to reduce the size and improve the efficiency.", "### Size and Format\nThis dataset resize all the image which have more than 2048x2048 pixel into near 2048x2048 pixels with bicubic algorithm.<br>\nAnd remove all the image with longer edge larger than 16383 after resize.<br>\n(one reason is beacuse webp doesn't allow that, another is that aspect ratio is too large/small.)\n\nThis dataset encode/save all the image with 90% quality webp with pillow library in Python.\nWhich is half size of the 100% quality lossy webp.\n\nThe total size of this dataset is around 1.3~1.4TB. Which is less than the 20% of original file size.", "### Webdataset\nThis dataset use webdataset library to save all the tarfile, therefore, you can also use webdataset to load them easily. This is also a recommended way.\n\nThe '__key__' of each files is the id of it. You can use this id to query the metadata database easily.\n\n---", "## Future work\nI will open a repo on github for utilizing danbooru-webp and danbooru-sqlite datasets as a dataset exporter for fine-grained-image-task.<br>\nSince the original danbooru2023 actually doesn't have images published after 2023/11/20, and it may be updated in the future.<br>\nThis dataset will be updated after original dataset is been updated. And maintain the same format." ]
[ 79, 64, 18, 147, 72, 101 ]
[ "passage: TAGS\n#task_categories-image-classification #task_categories-zero-shot-image-classification #task_categories-text-to-image #size_categories-1M<n<10M #language-English #license-mit #art #anime #not-for-all-audiences #region-us \n# Danbooru 2023 webp: A space-efficient version of Danbooru 2023\n\nThis dataset is a resized/re-encoded version of danbooru2023.<br>\nWhich removed the non-image/truncated files and resize all of them into smaller size.\n\n---## Details\nThis dataset employs few method to reduce the size and improve the efficiency.### Size and Format\nThis dataset resize all the image which have more than 2048x2048 pixel into near 2048x2048 pixels with bicubic algorithm.<br>\nAnd remove all the image with longer edge larger than 16383 after resize.<br>\n(one reason is beacuse webp doesn't allow that, another is that aspect ratio is too large/small.)\n\nThis dataset encode/save all the image with 90% quality webp with pillow library in Python.\nWhich is half size of the 100% quality lossy webp.\n\nThe total size of this dataset is around 1.3~1.4TB. Which is less than the 20% of original file size.### Webdataset\nThis dataset use webdataset library to save all the tarfile, therefore, you can also use webdataset to load them easily. This is also a recommended way.\n\nThe '__key__' of each files is the id of it. You can use this id to query the metadata database easily.\n\n---## Future work\nI will open a repo on github for utilizing danbooru-webp and danbooru-sqlite datasets as a dataset exporter for fine-grained-image-task.<br>\nSince the original danbooru2023 actually doesn't have images published after 2023/11/20, and it may be updated in the future.<br>\nThis dataset will be updated after original dataset is been updated. And maintain the same format." ]
[ -0.0209801122546196, 0.12449538707733154, -0.006675648503005505, 0.07159431278705597, 0.09440689533948898, 0.07483668625354767, 0.006399837322533131, 0.1314445286989212, -0.09571328014135361, 0.11249488592147827, 0.02299649268388748, -0.07087519019842148, 0.10166668146848679, 0.1394580900669098, -0.007879852317273617, -0.2512609362602234, 0.04245741665363312, -0.009218541905283928, 0.09083662182092667, 0.0653139129281044, 0.06660477817058563, -0.10969291627407074, 0.05791797488927841, -0.08280442655086517, -0.041373033076524734, -0.003462127409875393, 0.001186094363220036, 0.027481330558657646, -0.018825139850378036, 0.08687927573919296, 0.05776692181825638, -0.026184016838669777, 0.026170270517468452, -0.18588127195835114, 0.012879282236099243, 0.11400725692510605, -0.01215603668242693, 0.04684566706418991, 0.02603279985487461, 0.020732907578349113, 0.0958394780755043, -0.11958467960357666, -0.03266311436891556, 0.026867205277085304, -0.07542216032743454, -0.09328143298625946, -0.13037705421447754, 0.07991460710763931, 0.07411018759012222, 0.031036648899316788, -0.025810537859797478, 0.009344696067273617, -0.042809128761291504, 0.0708921030163765, 0.14842720329761505, -0.13581059873104095, -0.03367248922586441, 0.09245990961790085, -0.04323575645685196, 0.1160125732421875, -0.08875533938407898, -0.024882279336452484, -0.010327844880521297, 0.01839999482035637, 0.011252175085246563, -0.04557851329445839, -0.033004384487867355, -0.0351448655128479, -0.03586488217115402, -0.09182173013687134, 0.17806075513362885, 0.04072415828704834, -0.056123118847608566, -0.10439352691173553, -0.12538030743598938, -0.06149649992585182, -0.0621727779507637, 0.04690663516521454, 0.06156112998723984, 0.03929797187447548, -0.040921084582805634, -0.06509172916412354, -0.08097820729017258, 0.039704956114292145, -0.09908776730298996, 0.07907411456108093, 0.016421640291810036, 0.06437673419713974, -0.03742211312055588, 0.05896265059709549, -0.09836824983358383, -0.0997682437300682, 
-0.11417848616838455, -0.028588447719812393, -0.08260595798492432, 0.006235821172595024, -0.014229843392968178, -0.040998272597789764, 0.014226343482732773, 0.1982109546661377, -0.15434211492538452, 0.04808739200234413, -0.01476502139121294, 0.0025274003855884075, 0.05256037041544914, 0.17078621685504913, -0.1227840930223465, -0.13188771903514862, 0.027805741876363754, -0.03524915874004364, 0.08315669000148773, -0.020511606708168983, -0.0026964801363646984, -0.06301672011613846, 0.10142257809638977, -0.0008729270775802433, 0.054518844932317734, 0.037370242178440094, -0.07047741860151291, -0.054250460118055344, 0.15601836144924164, -0.1009603887796402, 0.04577250778675079, 0.05330188572406769, -0.028933728113770485, 0.12156949192285538, 0.10260072350502014, -0.044422172009944916, -0.12759540975093842, 0.14957313239574432, -0.059785522520542145, -0.020808177068829536, -0.07634881138801575, -0.1178627535700798, 0.044900212436914444, -0.030591096729040146, -0.01662963442504406, -0.07517840713262558, -0.18115302920341492, -0.02264491654932499, 0.0501103438436985, -0.06290391832590103, -0.015546773560345173, 0.08333215117454529, -0.0740228220820427, -0.028192035853862762, -0.015208509750664234, 0.011709524318575859, -0.04277782514691353, 0.07358375191688538, -0.045907385647296906, 0.0599866546690464, -0.08538287878036499, 0.007665102835744619, -0.04837740585207939, 0.02513316459953785, -0.2084687352180481, 0.09480713307857513, 0.032134030014276505, 0.004950002301484346, -0.10229700803756714, -0.08213295042514801, -0.13123437762260437, -0.04438270628452301, 0.02787693403661251, 0.11022315174341202, -0.07339999079704285, -0.008349135518074036, 0.13836903870105743, -0.05918363854289055, 0.02624516561627388, 0.10634981095790863, -0.018491793423891068, -0.01230010949075222, 0.10297276079654694, 0.05054618418216705, 0.20439551770687103, -0.03951234742999077, -0.16973057389259338, 0.0016177595825865865, -0.04731372743844986, 0.06311119347810745, 0.057010795921087265, 
0.005525542888790369, 0.07499627023935318, 0.06199514493346214, 0.04183041304349899, -0.001678131869994104, -0.007668750826269388, -0.031032023951411247, -0.02663145586848259, 0.009736706502735615, -0.006006596144288778, 0.0402374342083931, -0.054701730608940125, 0.028641970828175545, -0.08403189480304718, -0.04526253044605255, 0.09988600015640259, -0.07915693521499634, 0.06683014333248138, -0.06865188479423523, 0.053472716361284256, -0.1228427067399025, 0.06873778998851776, -0.1697746068239212, -0.03356104716658592, 0.029607446864247322, 0.04864002764225006, -0.022246791049838066, -0.0029283117037266493, 0.05894751101732254, -0.05083771422505379, -0.10525333136320114, -0.0003152333665639162, 0.000218139510252513, -0.07586859911680222, -0.06409003585577011, -0.05670733004808426, -0.010903052054345608, -0.034764938056468964, 0.05642908811569214, -0.0548437237739563, -0.0027242242358624935, 0.09041140973567963, 0.07685905694961548, 0.023513397201895714, -0.07570701837539673, 0.008920789696276188, -0.05130033940076828, -0.04163086786866188, -0.07147993892431259, -0.025715798139572144, 0.012629793956875801, 0.0246440377086401, 0.014582671225070953, -0.16873827576637268, -0.11365027725696564, 0.11297270655632019, 0.16446584463119507, -0.012203330174088478, -0.000530566496308893, -0.008778906427323818, -0.010892548598349094, -0.06010359525680542, -0.0816887617111206, 0.07278074324131012, 0.062223926186561584, 0.09034625440835953, -0.08338575810194016, -0.029728395864367485, 0.03734487667679787, 0.00866314209997654, 0.030980469658970833, 0.017962567508220673, 0.006918192375451326, -0.11785309761762619, 0.08914654701948166, 0.007392388768494129, 0.0121699757874012, 0.17226672172546387, -0.009047509171068668, -0.0955457016825676, -0.016976337879896164, 0.06564714014530182, 0.026911403983831406, 0.13744720816612244, 0.06362044811248779, 0.06171906366944313, 0.037787795066833496, 0.03466031700372696, 0.0007332384120672941, -0.06708288937807083, 0.06952304393053055, 
0.04759140685200691, -0.05794135108590126, -0.0652187392115593, -0.06690529733896255, 0.038126058876514435, 0.0964253842830658, -0.028767064213752747, 0.11825022101402283, -0.037914399057626724, -0.015393448993563652, -0.07964968681335449, 0.20288020372390747, -0.019703125581145287, -0.12636616826057434, -0.10839538276195526, -0.02801779843866825, -0.08922411501407623, -0.016190171241760254, 0.007891993038356304, -0.05118875950574875, -0.09347021579742432, -0.08742888271808624, 0.05805381014943123, -0.0058294679038226604, 0.019043361768126488, -0.03991299495100975, -0.014113886281847954, 0.08306757360696793, -0.11557380855083466, 0.00839503575116396, -0.013206018134951591, -0.03772136569023132, 0.07531141489744186, -0.010372944176197052, 0.09665185958147049, 0.031297821551561356, -0.05031042918562889, -0.024511871859431267, -0.006111236289143562, 0.1681622713804245, -0.08245670795440674, 0.11684630066156387, 0.07205000519752502, -0.00421126140281558, 0.08646056056022644, 0.17813795804977417, -0.04371573030948639, -0.037586551159620285, 0.00659262714907527, 0.07581806182861328, -0.07159073650836945, -0.13571909070014954, -0.032850105315446854, -0.10466507077217102, 0.09709137678146362, 0.12842144072055817, 0.0345943346619606, -0.05723067745566368, 0.07430464029312134, -0.1596611887216568, 0.08475568145513535, 0.04864909499883652, 0.08210199326276779, 0.021566053852438927, 0.011573773808777332, 0.08228171616792679, -0.04809119552373886, 0.031141558662056923, 0.13559743762016296, 0.055046066641807556, 0.20563219487667084, -0.13743959367275238, 0.16452184319496155, -0.028827469795942307, 0.00783158652484417, -0.03265787288546562, 0.15557651221752167, -0.08250844478607178, 0.03620612621307373, -0.03591732308268547, -0.015295188874006271, -0.013294055126607418, 0.031581781804561615, 0.02815263904631138, -0.012604552321135998, 0.003748637158423662, -0.0071987127885222435, 0.09322331845760345, 0.19108925759792328, 0.03306236490607262, -0.19838234782218933, 
-0.05759807303547859, 0.05596589669585228, -0.024819117039442062, -0.11181085556745529, -0.05293525755405426, 0.21294701099395752, 0.00030885578598827124, 0.09082411229610443, -0.1098269373178482, 0.0824015811085701, -0.08885473012924194, -0.00864529050886631, -0.042559996247291565, 0.10038360953330994, -0.017909377813339233, 0.078116275370121, -0.10442181676626205, -0.01692020148038864, 0.02906334400177002, 0.055229924619197845, -0.01641562581062317, 0.051911015063524246, -0.013235706835985184, 0.020424460992217064, 0.11952050030231476, 0.02003324031829834, 0.027227317914366722, -0.06299005448818207, -0.11893364042043686, 0.06223321333527565, 0.05881987512111664, -0.020032860338687897, 0.16759152710437775, -0.04985686019062996, -0.0007645456935279071, 0.0058544171042740345, -0.0016287823673337698, 0.008579676039516926, -0.20400097966194153, 0.028650807216763496, -0.03573424741625786, -0.041879333555698395, 0.008254559710621834, 0.0678107813000679, 0.02182985655963421, 0.12189113348722458, -0.13167494535446167, -0.09026198089122772, -0.13320542871952057, 0.13868853449821472, 0.14964552223682404, -0.09031780809164047, 0.012245765887200832, -0.03633984178304672, 0.07912462204694748, -0.02613435499370098, -0.09357210248708725, -0.0005839576479047537, -0.07451765239238739, -0.14709778130054474, -0.016442693769931793, 0.00244563608430326, -0.0900387316942215, 0.04195622727274895, 0.005025045946240425, 0.03322983533143997, -0.041430000215768814, -0.11287564784288406, -0.07383685559034348, 0.04555947333574295, 0.12210970371961594, 0.047945208847522736, -0.054917410016059875, -0.057806797325611115, -0.030040642246603966, 0.0637369379401207, 0.04182247072458267, 0.143707275390625, -0.09972523152828217, 0.019736427813768387, 0.06682086735963821, -0.032274626195430756, -0.2381531447172165, -0.09516163915395737, 0.006852440070360899, 0.023068521171808243, -0.0018425047164782882, -0.17676429450511932, 0.052351560443639755, 0.04486678168177605, 0.03298893943428993, 
-0.003788221627473831, -0.21724937856197357, -0.06149385869503021, -0.05971883237361908, 0.03644736483693123, 0.1859583556652069, -0.14010298252105713, 0.009019198827445507, -0.04623210057616234, -0.03947501257061958, 0.03483405336737633, -0.1326393485069275, 0.05744808912277222, 0.02084713988006115, 0.025812381878495216, 0.05946826562285423, -0.06329905241727829, 0.14078359305858612, -0.045290980488061905, 0.03703419119119644, -0.05615748092532158, -0.022718921303749084, 0.05530436336994171, -0.07075466960668564, 0.18001562356948853, -0.07862528413534164, 0.08725864440202713, -0.06581173092126846, -0.05037345364689827, -0.06244848296046257, -0.03655586019158363, -0.021628985181450844, -0.022991832345724106, -0.09175311774015427, -0.004217843990772963, 0.13110725581645966, -0.019107598811388016, 0.0365133136510849, 0.02732829377055168, -0.14239266514778137, 0.10230594873428345, 0.04481629282236099, -0.004174945876002312, -0.14141437411308289, -0.02874056063592434, -0.005946814082562923, 0.0905633419752121, -0.22473736107349396, 0.08207612484693527, 0.060090769082307816, 0.001533633447252214, 0.08897055685520172, 0.029014136642217636, -0.10497907549142838, -0.012244214303791523, 0.11487389355897903, -0.099190853536129, -0.22261568903923035, 0.08096539229154587, -0.05937538668513298, -0.2267700582742691, -0.06160091981291771, 0.11675143986940384, -0.002026139758527279, 0.0000510601639689412, 0.026837078854441643, 0.10849129408597946, 0.06676440685987473, 0.14437712728977203, -0.009934179484844208, -0.021983696147799492, -0.1427658647298813, 0.08808738738298416, 0.10568086802959442, -0.06706615537405014, 0.0048661259934306145, 0.028912046924233437, -0.07028690725564957, -0.03213813528418541, -0.01630648784339428, -0.06139839440584183, 0.04237392544746399, -0.0036409255117177963, -0.029417207464575768, -0.027956228703260422, 0.03207527473568916, -0.014216161333024502, -0.018455661833286285, 0.06871504336595535, -0.009229304268956184, 0.012927645817399025, 
-0.11744926869869232, 0.10411970317363739, 0.08328673243522644, 0.05745546519756317, -0.08727478235960007, 0.006767511833459139, -0.017354782670736313, 0.008313100785017014, -0.011010636575520039, 0.05060915648937225, -0.04067397862672806, -0.04229883477091789, -0.10018305480480194, 0.057713255286216736, -0.04653862118721008, -0.03309835493564606, -0.016112353652715683, 0.009711721912026405, -0.07544824481010437, -0.004854490514844656, -0.04825243726372719, -0.08779894560575485, -0.06223776191473007, 0.015046623535454273, -0.11716328561306, -0.008172391913831234, 0.07358436286449432, -0.1094067320227623, 0.12337570637464523, 0.020918142050504684, -0.0005662222392857075, 0.008285445161163807, 0.024919334799051285, -0.09617277979850769, 0.07267023622989655, 0.04900216683745384, -0.02062034234404564, -0.0328943245112896, 0.0496932677924633, 0.026949746534228325, -0.012024583294987679, -0.006134166847914457, 0.02216070517897606, -0.14178599417209625, 0.05428788438439369, -0.10461944341659546, -0.012127848342061043, -0.07967227697372437, 0.04442097619175911, -0.01429425273090601, 0.0725378766655922, 0.17885234951972961, -0.002092486247420311, -0.01241373922675848, -0.21709848940372467, -0.0005878573283553123, 0.019790617749094963, -0.055067405104637146, -0.012442964129149914, -0.010953441262245178, 0.06052853912115097, 0.028828123584389687, 0.03448769450187683, 0.009465917944908142, -0.005389183294028044, 0.006758037488907576, 0.09104439616203308, -0.05021614953875542, 0.003886493621394038, 0.15978394448757172, 0.021804552525281906, -0.06067142263054848, -0.025411635637283325, 0.015315897762775421, 0.08729413151741028, 0.06448186188936234, 0.11139466613531113, 0.15657883882522583, 0.08222069591283798, 0.052236203104257584, -0.10740841180086136, -0.09024843573570251, 0.0013695230009034276, 0.08797146379947662, -0.13055142760276794, 0.04895482957363129, 0.02236763946712017, -0.02647223137319088, 0.16780675947666168, -0.1386517435312271, 0.11759655922651291, 
0.021937645971775055, -0.050769150257110596, -0.06328662484884262, -0.05204923078417778, -0.08506389707326889, -0.04335835203528404, -0.014557795599102974, -0.15279944241046906, 0.022725246846675873, 0.17422662675380707, -0.03179880604147911, 0.018187670037150383, 0.09676156938076019, -0.0030356659553945065, -0.135410338640213, -0.008458306081593037, 0.04156027361750603, 0.013337322510778904, 0.20432443916797638, 0.0074397544376552105, 0.07071967422962189, 0.027113759890198708, 0.16697613894939423, 0.09101518243551254, 0.1503496617078781, 0.08750642836093903, -0.05369117856025696, -0.078083835542202, -0.05369755998253822, 0.007361922413110733, 0.002611404750496149, 0.18593303859233856, 0.013577889651060104, 0.028621800243854523, -0.01248248852789402, 0.19934706389904022, -0.04016057401895523, -0.07723107188940048, -0.2109183371067047, 0.060102589428424835, 0.0739893689751625, 0.007749508135020733, -0.010014301165938377, -0.16451327502727509, -0.0004987715510651469, 0.16611601412296295, 0.0374029166996479, -0.028723688796162605, -0.021424047648906708, -0.0016122363740578294, -0.003611185122281313, 0.0449441559612751, 0.07402768731117249, -0.004823633003979921, 0.23374883830547333, -0.022941116243600845, 0.124413400888443, 0.045363184064626694, -0.0241602323949337, -0.06272336840629578, 0.12445460259914398, -0.04400938004255295, 0.014670578762888908, -0.10710549354553223, 0.015867872163653374, 0.10650943964719772, -0.12565508484840393, 0.07760052382946014, -0.11876010149717331, -0.062315478920936584, 0.011186691001057625, 0.04267818480730057, 0.009346951730549335, 0.09335944056510925, -0.010551312938332558, 0.03210172802209854, 0.1782827526330948, -0.044700268656015396, -0.14886589348316193, -0.0008282990893349051, 0.03478585556149483, -0.009006419219076633, 0.15495558083057404, 0.009937622584402561, -0.03654509782791138, 0.09033920615911484, -0.07969431579113007, -0.17179900407791138, -0.01622578874230385, -0.005509604699909687, -0.020814180374145508, 
0.03353174775838852, 0.14502806961536407, -0.07474517822265625, 0.0010222491109743714, 0.08746355026960373, 0.025613553822040558, -0.045362722128629684, 0.046958569437265396, -0.016980668529868126, -0.0747016966342926, 0.012979778461158276, -0.05735044181346893, 0.08772365748882294, 0.1601119190454483, -0.0070016831159591675, 0.030808638781309128, -0.04049479216337204, 0.009951243177056313, 0.025344382971525192, 0.16996479034423828, 0.002046138746663928, -0.01396313589066267, -0.023024797439575195, -0.053610287606716156, 0.08622497320175171, -0.19786949455738068, 0.004422513302415609, 0.09430515021085739, -0.022766070440411568, -0.05645839124917984, 0.14937813580036163, 0.04446757584810257, -0.012734497897326946, -0.03994790464639664, -0.039989762008190155, -0.01155281811952591, 0.0585651658475399, -0.025310194119811058, -0.07190582901239395 ]
033aa857e3f105999b60c9b1d492b3701816027f
# Dataset Card for Evaluation run of dominguesm/canarim-7b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [dominguesm/canarim-7b](https://huggingface.co/dominguesm/canarim-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_dominguesm__canarim-7b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T04:39:02.146933](https://huggingface.co/datasets/open-llm-leaderboard/details_dominguesm__canarim-7b/blob/main/results_2024-01-25T04-39-02.146933.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.4128450113919017, "acc_stderr": 0.0341595304064157, "acc_norm": 0.41725828050909847, "acc_norm_stderr": 0.03494988220974851, "mc1": 0.25091799265605874, "mc1_stderr": 0.015176985027707687, "mc2": 0.4002971290542134, "mc2_stderr": 0.013722080397364233 }, "harness|arc:challenge|25": { "acc": 0.4709897610921502, "acc_stderr": 0.01458677635529432, "acc_norm": 0.5196245733788396, "acc_norm_stderr": 0.01460013207594709 }, "harness|hellaswag|10": { "acc": 0.5755825532762398, "acc_stderr": 0.0049324414796655305, "acc_norm": 0.7752439753037244, "acc_norm_stderr": 0.004165684625540424 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.42962962962962964, "acc_stderr": 0.04276349494376599, "acc_norm": 0.42962962962962964, "acc_norm_stderr": 0.04276349494376599 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.3618421052631579, "acc_stderr": 0.03910525752849724, "acc_norm": 0.3618421052631579, "acc_norm_stderr": 0.03910525752849724 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.3283018867924528, "acc_stderr": 0.028901593612411784, "acc_norm": 0.3283018867924528, "acc_norm_stderr": 0.028901593612411784 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.3958333333333333, "acc_stderr": 0.04089465449325582, "acc_norm": 0.3958333333333333, "acc_norm_stderr": 0.04089465449325582 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3236994219653179, "acc_stderr": 0.035676037996391706, "acc_norm": 0.3236994219653179, "acc_norm_stderr": 0.035676037996391706 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.17647058823529413, "acc_stderr": 0.03793281185307809, "acc_norm": 0.17647058823529413, "acc_norm_stderr": 0.03793281185307809 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.39574468085106385, "acc_stderr": 0.03196758697835362, "acc_norm": 0.39574468085106385, "acc_norm_stderr": 0.03196758697835362 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.21052631578947367, "acc_stderr": 0.03835153954399421, "acc_norm": 0.21052631578947367, "acc_norm_stderr": 0.03835153954399421 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.43448275862068964, "acc_stderr": 0.04130740879555497, "acc_norm": 0.43448275862068964, "acc_norm_stderr": 0.04130740879555497 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.24603174603174602, "acc_stderr": 0.02218203720294836, "acc_norm": 0.24603174603174602, "acc_norm_stderr": 0.02218203720294836 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3333333333333333, "acc_stderr": 0.042163702135578345, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.042163702135578345 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.4, "acc_stderr": 0.027869320571664632, "acc_norm": 0.4, "acc_norm_stderr": 0.027869320571664632 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2561576354679803, 
"acc_stderr": 0.0307127300709826, "acc_norm": 0.2561576354679803, "acc_norm_stderr": 0.0307127300709826 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.5151515151515151, "acc_stderr": 0.03902551007374448, "acc_norm": 0.5151515151515151, "acc_norm_stderr": 0.03902551007374448 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.3939393939393939, "acc_stderr": 0.03481285338232963, "acc_norm": 0.3939393939393939, "acc_norm_stderr": 0.03481285338232963 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.5699481865284974, "acc_stderr": 0.03572954333144808, "acc_norm": 0.5699481865284974, "acc_norm_stderr": 0.03572954333144808 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.35384615384615387, "acc_stderr": 0.024243783994062164, "acc_norm": 0.35384615384615387, "acc_norm_stderr": 0.024243783994062164 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.02696242432507384, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.02696242432507384 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.3487394957983193, "acc_stderr": 0.030956636328566548, "acc_norm": 0.3487394957983193, "acc_norm_stderr": 0.030956636328566548 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.271523178807947, "acc_stderr": 0.03631329803969653, "acc_norm": 0.271523178807947, "acc_norm_stderr": 0.03631329803969653 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.4954128440366973, "acc_stderr": 0.021436420955529428, "acc_norm": 0.4954128440366973, "acc_norm_stderr": 0.021436420955529428 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.19907407407407407, "acc_stderr": 0.027232298462690218, "acc_norm": 0.19907407407407407, "acc_norm_stderr": 0.027232298462690218 
}, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.4362745098039216, "acc_stderr": 0.03480693138457038, "acc_norm": 0.4362745098039216, "acc_norm_stderr": 0.03480693138457038 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.4641350210970464, "acc_stderr": 0.03246338898055659, "acc_norm": 0.4641350210970464, "acc_norm_stderr": 0.03246338898055659 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.484304932735426, "acc_stderr": 0.0335412657542081, "acc_norm": 0.484304932735426, "acc_norm_stderr": 0.0335412657542081 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.4580152671755725, "acc_stderr": 0.04369802690578757, "acc_norm": 0.4580152671755725, "acc_norm_stderr": 0.04369802690578757 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6115702479338843, "acc_stderr": 0.044492703500683836, "acc_norm": 0.6115702479338843, "acc_norm_stderr": 0.044492703500683836 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.48148148148148145, "acc_stderr": 0.04830366024635331, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.04830366024635331 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.4662576687116564, "acc_stderr": 0.039194155450484096, "acc_norm": 0.4662576687116564, "acc_norm_stderr": 0.039194155450484096 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4017857142857143, "acc_stderr": 0.04653333146973646, "acc_norm": 0.4017857142857143, "acc_norm_stderr": 0.04653333146973646 }, "harness|hendrycksTest-management|5": { "acc": 0.4174757281553398, "acc_stderr": 0.04882840548212238, "acc_norm": 0.4174757281553398, "acc_norm_stderr": 0.04882840548212238 }, "harness|hendrycksTest-marketing|5": { "acc": 0.6282051282051282, "acc_stderr": 0.03166098891888079, "acc_norm": 0.6282051282051282, "acc_norm_stderr": 0.03166098891888079 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.51, "acc_stderr": 0.05024183937956914, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956914 }, 
"harness|hendrycksTest-miscellaneous|5": { "acc": 0.5708812260536399, "acc_stderr": 0.01769938848312678, "acc_norm": 0.5708812260536399, "acc_norm_stderr": 0.01769938848312678 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.4624277456647399, "acc_stderr": 0.026842985519615375, "acc_norm": 0.4624277456647399, "acc_norm_stderr": 0.026842985519615375 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.4084967320261438, "acc_stderr": 0.02814640599309636, "acc_norm": 0.4084967320261438, "acc_norm_stderr": 0.02814640599309636 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5241157556270096, "acc_stderr": 0.02836504154256457, "acc_norm": 0.5241157556270096, "acc_norm_stderr": 0.02836504154256457 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.4691358024691358, "acc_stderr": 0.027767689606833935, "acc_norm": 0.4691358024691358, "acc_norm_stderr": 0.027767689606833935 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.30141843971631205, "acc_stderr": 0.02737412888263115, "acc_norm": 0.30141843971631205, "acc_norm_stderr": 0.02737412888263115 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3305084745762712, "acc_stderr": 0.012014142101842958, "acc_norm": 0.3305084745762712, "acc_norm_stderr": 0.012014142101842958 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.49264705882352944, "acc_stderr": 0.030369552523902173, "acc_norm": 0.49264705882352944, "acc_norm_stderr": 0.030369552523902173 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4526143790849673, "acc_stderr": 0.02013679091849254, "acc_norm": 0.4526143790849673, "acc_norm_stderr": 0.02013679091849254 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5272727272727272, "acc_stderr": 0.04782001791380061, "acc_norm": 0.5272727272727272, "acc_norm_stderr": 
0.04782001791380061 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.40816326530612246, "acc_stderr": 0.03146465712827424, "acc_norm": 0.40816326530612246, "acc_norm_stderr": 0.03146465712827424 }, "harness|hendrycksTest-sociology|5": { "acc": 0.5124378109452736, "acc_stderr": 0.03534439848539579, "acc_norm": 0.5124378109452736, "acc_norm_stderr": 0.03534439848539579 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-virology|5": { "acc": 0.3795180722891566, "acc_stderr": 0.03777798822748017, "acc_norm": 0.3795180722891566, "acc_norm_stderr": 0.03777798822748017 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6198830409356725, "acc_stderr": 0.037229657413855394, "acc_norm": 0.6198830409356725, "acc_norm_stderr": 0.037229657413855394 }, "harness|truthfulqa:mc|0": { "mc1": 0.25091799265605874, "mc1_stderr": 0.015176985027707687, "mc2": 0.4002971290542134, "mc2_stderr": 0.013722080397364233 }, "harness|winogrande|5": { "acc": 0.7142857142857143, "acc_stderr": 0.01269653187003862 }, "harness|gsm8k|5": { "acc": 0.09931766489764973, "acc_stderr": 0.008238371412683989 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_dominguesm__canarim-7b
[ "region:us" ]
2024-01-25T04:41:27+00:00
{"pretty_name": "Evaluation run of dominguesm/canarim-7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [dominguesm/canarim-7b](https://huggingface.co/dominguesm/canarim-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_dominguesm__canarim-7b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T04:39:02.146933](https://huggingface.co/datasets/open-llm-leaderboard/details_dominguesm__canarim-7b/blob/main/results_2024-01-25T04-39-02.146933.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.4128450113919017,\n \"acc_stderr\": 0.0341595304064157,\n \"acc_norm\": 0.41725828050909847,\n \"acc_norm_stderr\": 0.03494988220974851,\n \"mc1\": 0.25091799265605874,\n \"mc1_stderr\": 0.015176985027707687,\n \"mc2\": 0.4002971290542134,\n \"mc2_stderr\": 0.013722080397364233\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.4709897610921502,\n \"acc_stderr\": 0.01458677635529432,\n \"acc_norm\": 0.5196245733788396,\n \"acc_norm_stderr\": 0.01460013207594709\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5755825532762398,\n \"acc_stderr\": 0.0049324414796655305,\n \"acc_norm\": 0.7752439753037244,\n \"acc_norm_stderr\": 0.004165684625540424\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.42962962962962964,\n \"acc_stderr\": 0.04276349494376599,\n \"acc_norm\": 0.42962962962962964,\n \"acc_norm_stderr\": 0.04276349494376599\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.3618421052631579,\n \"acc_stderr\": 0.03910525752849724,\n \"acc_norm\": 0.3618421052631579,\n \"acc_norm_stderr\": 0.03910525752849724\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.3283018867924528,\n \"acc_stderr\": 0.028901593612411784,\n \"acc_norm\": 0.3283018867924528,\n \"acc_norm_stderr\": 0.028901593612411784\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.3958333333333333,\n \"acc_stderr\": 0.04089465449325582,\n \"acc_norm\": 0.3958333333333333,\n \"acc_norm_stderr\": 0.04089465449325582\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 
0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3236994219653179,\n \"acc_stderr\": 0.035676037996391706,\n \"acc_norm\": 0.3236994219653179,\n \"acc_norm_stderr\": 0.035676037996391706\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.17647058823529413,\n \"acc_stderr\": 0.03793281185307809,\n \"acc_norm\": 0.17647058823529413,\n \"acc_norm_stderr\": 0.03793281185307809\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.39574468085106385,\n \"acc_stderr\": 0.03196758697835362,\n \"acc_norm\": 0.39574468085106385,\n \"acc_norm_stderr\": 0.03196758697835362\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.21052631578947367,\n \"acc_stderr\": 0.03835153954399421,\n \"acc_norm\": 0.21052631578947367,\n \"acc_norm_stderr\": 0.03835153954399421\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.43448275862068964,\n \"acc_stderr\": 0.04130740879555497,\n \"acc_norm\": 0.43448275862068964,\n \"acc_norm_stderr\": 0.04130740879555497\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.24603174603174602,\n \"acc_stderr\": 0.02218203720294836,\n \"acc_norm\": 0.24603174603174602,\n \"acc_norm_stderr\": 0.02218203720294836\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 
0.042163702135578345,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.042163702135578345\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.027869320571664632,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.027869320571664632\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.2561576354679803,\n \"acc_stderr\": 0.0307127300709826,\n \"acc_norm\": 0.2561576354679803,\n \"acc_norm_stderr\": 0.0307127300709826\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.5151515151515151,\n \"acc_stderr\": 0.03902551007374448,\n \"acc_norm\": 0.5151515151515151,\n \"acc_norm_stderr\": 0.03902551007374448\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.3939393939393939,\n \"acc_stderr\": 0.03481285338232963,\n \"acc_norm\": 0.3939393939393939,\n \"acc_norm_stderr\": 0.03481285338232963\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.5699481865284974,\n \"acc_stderr\": 0.03572954333144808,\n \"acc_norm\": 0.5699481865284974,\n \"acc_norm_stderr\": 0.03572954333144808\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.35384615384615387,\n \"acc_stderr\": 0.024243783994062164,\n \"acc_norm\": 0.35384615384615387,\n \"acc_norm_stderr\": 0.024243783994062164\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26666666666666666,\n \"acc_stderr\": 0.02696242432507384,\n \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.02696242432507384\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n 
\"acc\": 0.3487394957983193,\n \"acc_stderr\": 0.030956636328566548,\n \"acc_norm\": 0.3487394957983193,\n \"acc_norm_stderr\": 0.030956636328566548\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.271523178807947,\n \"acc_stderr\": 0.03631329803969653,\n \"acc_norm\": 0.271523178807947,\n \"acc_norm_stderr\": 0.03631329803969653\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.4954128440366973,\n \"acc_stderr\": 0.021436420955529428,\n \"acc_norm\": 0.4954128440366973,\n \"acc_norm_stderr\": 0.021436420955529428\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.19907407407407407,\n \"acc_stderr\": 0.027232298462690218,\n \"acc_norm\": 0.19907407407407407,\n \"acc_norm_stderr\": 0.027232298462690218\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.4362745098039216,\n \"acc_stderr\": 0.03480693138457038,\n \"acc_norm\": 0.4362745098039216,\n \"acc_norm_stderr\": 0.03480693138457038\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.4641350210970464,\n \"acc_stderr\": 0.03246338898055659,\n \"acc_norm\": 0.4641350210970464,\n \"acc_norm_stderr\": 0.03246338898055659\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.484304932735426,\n \"acc_stderr\": 0.0335412657542081,\n \"acc_norm\": 0.484304932735426,\n \"acc_norm_stderr\": 0.0335412657542081\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.4580152671755725,\n \"acc_stderr\": 0.04369802690578757,\n \"acc_norm\": 0.4580152671755725,\n \"acc_norm_stderr\": 0.04369802690578757\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6115702479338843,\n \"acc_stderr\": 0.044492703500683836,\n \"acc_norm\": 0.6115702479338843,\n \"acc_norm_stderr\": 0.044492703500683836\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.04830366024635331,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 
0.04830366024635331\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.4662576687116564,\n \"acc_stderr\": 0.039194155450484096,\n \"acc_norm\": 0.4662576687116564,\n \"acc_norm_stderr\": 0.039194155450484096\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4017857142857143,\n \"acc_stderr\": 0.04653333146973646,\n \"acc_norm\": 0.4017857142857143,\n \"acc_norm_stderr\": 0.04653333146973646\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.4174757281553398,\n \"acc_stderr\": 0.04882840548212238,\n \"acc_norm\": 0.4174757281553398,\n \"acc_norm_stderr\": 0.04882840548212238\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.6282051282051282,\n \"acc_stderr\": 0.03166098891888079,\n \"acc_norm\": 0.6282051282051282,\n \"acc_norm_stderr\": 0.03166098891888079\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956914,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956914\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.5708812260536399,\n \"acc_stderr\": 0.01769938848312678,\n \"acc_norm\": 0.5708812260536399,\n \"acc_norm_stderr\": 0.01769938848312678\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.4624277456647399,\n \"acc_stderr\": 0.026842985519615375,\n \"acc_norm\": 0.4624277456647399,\n \"acc_norm_stderr\": 0.026842985519615375\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.4084967320261438,\n \"acc_stderr\": 0.02814640599309636,\n \"acc_norm\": 0.4084967320261438,\n \"acc_norm_stderr\": 0.02814640599309636\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5241157556270096,\n \"acc_stderr\": 0.02836504154256457,\n \"acc_norm\": 0.5241157556270096,\n \"acc_norm_stderr\": 
0.02836504154256457\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.4691358024691358,\n \"acc_stderr\": 0.027767689606833935,\n \"acc_norm\": 0.4691358024691358,\n \"acc_norm_stderr\": 0.027767689606833935\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.30141843971631205,\n \"acc_stderr\": 0.02737412888263115,\n \"acc_norm\": 0.30141843971631205,\n \"acc_norm_stderr\": 0.02737412888263115\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3305084745762712,\n \"acc_stderr\": 0.012014142101842958,\n \"acc_norm\": 0.3305084745762712,\n \"acc_norm_stderr\": 0.012014142101842958\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.49264705882352944,\n \"acc_stderr\": 0.030369552523902173,\n \"acc_norm\": 0.49264705882352944,\n \"acc_norm_stderr\": 0.030369552523902173\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.4526143790849673,\n \"acc_stderr\": 0.02013679091849254,\n \"acc_norm\": 0.4526143790849673,\n \"acc_norm_stderr\": 0.02013679091849254\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5272727272727272,\n \"acc_stderr\": 0.04782001791380061,\n \"acc_norm\": 0.5272727272727272,\n \"acc_norm_stderr\": 0.04782001791380061\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.40816326530612246,\n \"acc_stderr\": 0.03146465712827424,\n \"acc_norm\": 0.40816326530612246,\n \"acc_norm_stderr\": 0.03146465712827424\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.5124378109452736,\n \"acc_stderr\": 0.03534439848539579,\n \"acc_norm\": 0.5124378109452736,\n \"acc_norm_stderr\": 0.03534439848539579\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3795180722891566,\n \"acc_stderr\": 0.03777798822748017,\n \"acc_norm\": 
0.3795180722891566,\n \"acc_norm_stderr\": 0.03777798822748017\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.6198830409356725,\n \"acc_stderr\": 0.037229657413855394,\n \"acc_norm\": 0.6198830409356725,\n \"acc_norm_stderr\": 0.037229657413855394\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.25091799265605874,\n \"mc1_stderr\": 0.015176985027707687,\n \"mc2\": 0.4002971290542134,\n \"mc2_stderr\": 0.013722080397364233\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.01269653187003862\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.09931766489764973,\n \"acc_stderr\": 0.008238371412683989\n }\n}\n```", "repo_url": "https://huggingface.co/dominguesm/canarim-7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|arc:challenge|25_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|gsm8k|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hellaswag|10_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T04-39-02.146933.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T04-39-02.146933.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T04-39-02.146933.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T04-39-02.146933.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T04-39-02.146933.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T04-39-02.146933.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T04-39-02.146933.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T04-39-02.146933.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["**/details_harness|winogrande|5_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T04-39-02.146933.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T04_39_02.146933", "path": ["results_2024-01-25T04-39-02.146933.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T04-39-02.146933.parquet"]}]}]}
2024-01-25T04:41:51+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of dominguesm/canarim-7b Dataset automatically created during the evaluation run of model dominguesm/canarim-7b on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T04:39:02.146933(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of dominguesm/canarim-7b\n\n\n\nDataset automatically created during the evaluation run of model dominguesm/canarim-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T04:39:02.146933(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of dominguesm/canarim-7b\n\n\n\nDataset automatically created during the evaluation run of model dominguesm/canarim-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T04:39:02.146933(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 177, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of dominguesm/canarim-7b\n\n\n\nDataset automatically created during the evaluation run of model dominguesm/canarim-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T04:39:02.146933(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.050316959619522095, 0.18522360920906067, -0.006951120216399431, 0.037782181054353714, 0.06495817005634308, 0.00048330152640119195, 0.040149446576833725, 0.10840009152889252, 0.03459682688117027, 0.16202618181705475, -0.002525694901123643, 0.09934909641742706, 0.0947338119149208, 0.110084168612957, 0.028605995699763298, -0.1448858231306076, 0.052713848650455475, -0.08540979772806168, 0.09089148789644241, 0.072262704372406, 0.06839562207460403, -0.08230461925268173, 0.05821020528674126, -0.03193524852395058, 0.03149421885609627, -0.005958407651633024, -0.06119057908654213, -0.042790014296770096, 0.10467082262039185, 0.07324175536632538, 0.05105610564351082, -0.025570986792445183, 0.029651125892996788, -0.275429368019104, 0.019215375185012817, 0.09923066943883896, 0.0008985352469608188, 0.048942338675260544, 0.14008991420269012, -0.08841019123792648, 0.1036248505115509, -0.03231915831565857, 0.07412120699882507, 0.04250422492623329, -0.12783312797546387, -0.1564304083585739, -0.13741010427474976, 0.022908709943294525, 0.07161545008420944, 0.048147350549697876, -0.020410815253853798, 0.10600356757640839, -0.051503825932741165, 0.04853574186563492, 0.13449835777282715, -0.1835455447435379, -0.030369561165571213, 0.057493217289447784, 0.02856559120118618, 0.055539570748806, -0.08728430420160294, -0.03218012675642967, 0.02503816783428192, 0.050423748791217804, 0.0017179364804178476, 0.0061430116184055805, 0.05181112512946129, 0.022199543192982674, -0.14726710319519043, -0.13191406428813934, 0.10674881935119629, -0.011027758941054344, -0.03840085491538048, -0.15730677545070648, -0.05789633467793465, -0.020366469398140907, 0.007339744362980127, 0.014174875803291798, 0.012106457725167274, 0.002973316004499793, 0.06711925566196442, 0.0017827582778409123, -0.09059325605630875, -0.03436662256717682, -0.03840504586696625, 0.03071029856801033, 0.03481101244688034, -0.0005640348535962403, -0.0019474998116493225, 0.13858577609062195, 0.041379448026418686, -0.06367097049951553, 
-0.08555223047733307, -0.04459767043590546, -0.12793081998825073, -0.035344917327165604, 0.0244723130017519, -0.06853261590003967, 0.04749695584177971, 0.25143831968307495, -0.03600803390145302, 0.03189955651760101, -0.08889080584049225, 0.018914587795734406, 0.11247752606868744, 0.09704030305147171, -0.06824053078889847, -0.08164342492818832, -0.050579097121953964, 0.014249702915549278, 0.02590494602918625, -0.029922671616077423, 0.029784567654132843, 0.07011129707098007, 0.021039314568042755, 0.13656337559223175, 0.11337961256504059, 0.024227114394307137, -0.071675606071949, -0.014909966848790646, 0.14662082493305206, -0.16745410859584808, 0.009166956879198551, 0.020751984789967537, -0.02899472415447235, -0.07278590649366379, 0.06303556263446808, -0.021864181384444237, -0.06276597827672958, 0.1095835417509079, -0.06304074078798294, -0.06611993163824081, -0.10047829151153564, -0.06823373585939407, 0.04260140657424927, -0.02045215107500553, -0.06474872678518295, -0.059762269258499146, -0.13950496912002563, -0.08166312426328659, 0.03361440449953079, -0.0669703260064125, -0.0046582347713410854, 0.0024610816035419703, 0.020615804940462112, -0.014255717396736145, -0.008975761942565441, 0.10040103644132614, -0.0771719440817833, 0.03552006930112839, -0.042975109070539474, 0.04081007465720177, 0.1050911694765091, 0.024989765137434006, -0.1266372948884964, 0.09787605702877045, -0.09191203117370605, 0.09516768902540207, -0.08691830188035965, -0.026514599099755287, -0.11754269152879715, 0.015527996234595776, -0.019193358719348907, 0.018222972750663757, -0.01099434308707714, 0.08200167864561081, -0.20418955385684967, -0.011659171432256699, 0.1877492517232895, -0.12371235340833664, -0.058342672884464264, 0.07009734958410263, -0.026047339662909508, 0.06143546476960182, 0.045937586575746536, 0.07511899620294571, 0.08116759359836578, -0.0807580053806305, -0.09873968362808228, -0.06258901953697205, -0.03848292678594589, 0.1500634402036667, 0.05689888447523117, 
-0.08517717570066452, 0.08936172723770142, 0.030636176466941833, 0.013737167231738567, -0.04938269034028053, -0.012702744454145432, -0.060983892530202866, -0.0029638586565852165, -0.021658802404999733, -0.0713256448507309, -0.020737597718834877, -0.09298081696033478, -0.001330590108409524, -0.062149666249752045, -0.0033255249727517366, 0.09139034152030945, -0.0225241519510746, 0.027502428740262985, -0.08875446766614914, 0.07196686416864395, -0.015902988612651825, 0.02166159264743328, -0.206358402967453, -0.09243863821029663, 0.03584546223282814, -0.17492061853408813, 0.045065879821777344, 0.008157534524798393, 0.018800970166921616, 0.0520450621843338, -0.0021177823655307293, 0.011228778399527073, 0.02025878056883812, -0.012141224928200245, -0.003747825976461172, -0.15001651644706726, -0.03483022004365921, -0.06947444379329681, 0.07353944331407547, -0.11027227342128754, -0.02349824458360672, 0.0729411393404007, 0.157602459192276, 0.01871505379676819, -0.07494134455919266, 0.04123714193701744, 0.02280900627374649, -0.05137412250041962, -0.05622023344039917, 0.006819861009716988, -0.006700552999973297, 0.029295215383172035, 0.06770507991313934, -0.18041972815990448, -0.1496938019990921, 0.07462147623300552, 0.126581072807312, -0.07450119405984879, -0.06492672860622406, -0.06863952428102493, -0.05911189690232277, -0.09581556171178818, -0.04898843169212341, 0.08900240808725357, 0.08667702227830887, 0.053018175065517426, -0.06517577916383743, -0.04884418100118637, -0.012626208364963531, 0.03671969473361969, -0.0674058049917221, 0.10877002775669098, 0.09532016515731812, -0.09111838787794113, 0.10115811228752136, -0.012115593999624252, 0.10438384860754013, 0.11479870229959488, 0.008375789038836956, -0.11944903433322906, -0.019880184903740883, 0.06805330514907837, 0.04738116264343262, 0.08154314756393433, -0.021386081352829933, 0.049566444009542465, 0.08180312067270279, 0.0014058230444788933, 0.03422611206769943, -0.07946690917015076, 0.033415235579013824, 
0.03135019913315773, -0.012116757221519947, 0.005118181928992271, 0.0059091150760650635, 0.030240178108215332, 0.09062159806489944, 0.012614322826266289, 0.0663866400718689, -0.024108225479722023, -0.04711471125483513, -0.08297798037528992, 0.14133009314537048, -0.0928998738527298, -0.23106776177883148, -0.16432282328605652, -0.04320455715060234, -0.03125939890742302, -0.01224291231483221, 0.04719341918826103, 0.01161646656692028, -0.10063733905553818, -0.10556411743164062, 0.05182952061295509, 0.04110225290060043, -0.12036190927028656, -0.04159925878047943, 0.023166364058852196, -0.007249507121741772, -0.1650501787662506, 0.021829411387443542, 0.033933188766241074, -0.07072292268276215, 0.024331487715244293, 0.08111420273780823, 0.12467993795871735, 0.11358984559774399, 0.08398086577653885, -0.02124151401221752, -0.010463610291481018, 0.16520796716213226, -0.11489223688840866, 0.029049310833215714, 0.08663364499807358, -0.043703146278858185, 0.07711071521043777, 0.15090513229370117, 0.006703541148453951, -0.08132654428482056, 0.03581123426556587, 0.09732207655906677, -0.07003811746835709, -0.25961965322494507, -0.07824766635894775, -0.033525679260492325, 0.06753183156251907, 0.10559998452663422, 0.0808478593826294, -0.007644394878298044, 0.0007366336067207158, -0.10502411425113678, -0.04718563333153725, -0.025143155828118324, 0.06886689364910126, 0.051384925842285156, -0.009974461980164051, 0.04883307218551636, -0.05490898713469505, 0.020891107618808746, 0.12003634870052338, 0.03492039814591408, 0.176547110080719, -0.03587811812758446, 0.18148377537727356, 0.10329627990722656, 0.07632511109113693, -0.01890414208173752, 0.07826428860425949, -0.011141068302094936, 0.07031239569187164, -0.010073411278426647, -0.08651833981275558, -0.029304906725883484, 0.09223628044128418, 0.05392036586999893, -0.03093530237674713, 0.060166288167238235, -0.063441701233387, 0.06606872379779816, 0.26124754548072815, 0.011527304537594318, -0.15047447383403778, -0.03281101584434509, 
0.04776255413889885, -0.04562680423259735, -0.1005290076136589, 0.001178029808215797, 0.07154163718223572, -0.15634730458259583, 0.03353350609540939, -0.03924756869673729, 0.07327529042959213, -0.14558149874210358, -0.033915285021066666, -0.016839809715747833, 0.04802524298429489, -0.025249570608139038, 0.09622906893491745, -0.1648535281419754, 0.0937742069363594, -0.015612361021339893, 0.021423237398266792, -0.06576204299926758, 0.06872563064098358, -0.014536706730723381, -0.06850775331258774, 0.14749370515346527, -0.0018343407427892089, -0.09905031323432922, -0.06590504944324493, -0.11859238892793655, -0.01282609160989523, 0.04015357047319412, -0.10582374781370163, 0.1189945861697197, 0.014988677576184273, -0.02198958769440651, -0.04488536715507507, -0.006397038698196411, -0.07535597681999207, -0.20888245105743408, 0.08975320309400558, -0.1162317618727684, 0.05677782744169235, -0.04991242662072182, -0.03677656874060631, -0.05127518251538277, 0.15579988062381744, -0.1291811168193817, -0.06679467856884003, -0.1034339889883995, -0.03723926097154617, 0.14463917911052704, -0.06549599766731262, 0.0539076142013073, -0.048724446445703506, 0.16543877124786377, -0.031723134219646454, -0.04961852729320526, 0.019714966416358948, -0.06775182485580444, -0.1688472479581833, -0.04593086615204811, 0.11828726530075073, 0.06256535649299622, 0.010910781100392342, -0.007559035439044237, 0.06187458708882332, 0.007599960546940565, -0.09500700235366821, 0.041708625853061676, 0.12113626301288605, 0.09480038285255432, 0.060833267867565155, -0.0354439876973629, -0.082871213555336, -0.09110194444656372, -0.07879875600337982, 0.0694638267159462, 0.18073123693466187, -0.052877917885780334, 0.1355629414319992, 0.12916095554828644, -0.11711061000823975, -0.2043921798467636, -0.09211385995149612, -0.018496142700314522, -0.013576074503362179, 0.0983552485704422, -0.19734901189804077, 0.045949701219797134, 0.08878215402364731, -0.023367183282971382, 0.12140575051307678, -0.27813565731048584, 
-0.1339808702468872, 0.05100168287754059, 0.03464255854487419, -0.1689291149377823, -0.1329313963651657, -0.09051742404699326, -0.0170514527708292, -0.12648320198059082, 0.11635883897542953, -0.018171608448028564, 0.04720274358987808, -0.020244378596544266, 0.051702819764614105, 0.03550977259874344, -0.0634140819311142, 0.13462816178798676, -0.012337020598351955, 0.035745032131671906, -0.08796630799770355, -0.009396165609359741, -0.006885419134050608, -0.04649674519896507, 0.06749933212995529, 0.026698419824242592, 0.0260624997317791, -0.06678660213947296, -0.04333454370498657, -0.04574546962976456, 0.034521542489528656, -0.0664137527346611, -0.06374774128198624, -0.057006388902664185, 0.07843116670846939, 0.07772629708051682, -0.012717925943434238, 0.02910241112112999, -0.04932752996683121, 0.05580493435263634, 0.2253204584121704, 0.06350990384817123, 0.040260277688503265, -0.12000461667776108, -0.03933975100517273, -0.007722220849245787, 0.008249488659203053, -0.08264528214931488, 0.05257280543446541, 0.09564165025949478, 0.035925090312957764, 0.11146821826696396, -0.006681805010885, -0.1970216929912567, -0.0019992319867014885, 0.08156947046518326, -0.10447680950164795, -0.1950794905424118, 0.052588045597076416, 0.02560298889875412, -0.10614017397165298, -0.08621393889188766, 0.08920860290527344, 0.028531396761536598, -0.014008293859660625, 0.016664214432239532, 0.07088790833950043, 0.03525831177830696, 0.09035748988389969, -0.03064991720020771, 0.04299294576048851, -0.07475720345973969, 0.12729685008525848, 0.1451491415500641, -0.11645760387182236, -0.01227294746786356, 0.06470297276973724, -0.0448504202067852, -0.05753529444336891, -0.04921436682343483, 0.056397952139377594, -0.007841164246201515, -0.04246821254491806, -0.01394712831825018, -0.06852368265390396, 0.08095818012952805, 0.1414227932691574, -0.005406356416642666, 0.07569363713264465, 0.016149910166859627, -0.0041996487416327, -0.04809137061238289, 0.10991831868886948, 0.04076390713453293, 
0.03931843116879463, -0.018017295747995377, 0.0374777652323246, 0.017057333141565323, -0.020850583910942078, 0.01752355694770813, -0.06783679127693176, -0.06338518112897873, 0.018193762749433517, -0.1720719337463379, 0.025375517085194588, -0.08834703266620636, -0.012688398361206055, 0.007653184700757265, 0.01291801780462265, -0.0029046949930489063, 0.010494655929505825, -0.05505949258804321, -0.05133145675063133, -0.04619142413139343, 0.1189398393034935, -0.20584918558597565, -0.004933889023959637, 0.08365153521299362, -0.08293119817972183, 0.07472144067287445, -0.0023327756207436323, -0.009657989256083965, 0.010357008315622807, -0.08825961500406265, -0.0004095247422810644, -0.01798516884446144, 0.04112810641527176, 0.012842077761888504, -0.14673343300819397, -0.011177057400345802, -0.0009769776370376348, -0.10090679675340652, 0.0014906361466273665, -0.0038733992259949446, -0.1431192010641098, 0.07215265184640884, 0.09315163642168045, -0.050915926694869995, -0.029951758682727814, 0.02372078225016594, 0.030796313658356667, 0.016884034499526024, 0.09717590361833572, -0.028334062546491623, 0.03968155384063721, -0.15310953557491302, -0.037083860486745834, 0.007107340730726719, 0.006569999270141125, 0.04387291520833969, 0.0026666203048080206, 0.03095521591603756, -0.01774413138628006, 0.22605003416538239, -0.029225140810012817, -0.0024958085268735886, 0.026681974530220032, -0.002978002419695258, -0.05785617604851723, 0.02869124710559845, -0.01648213341832161, 0.014985953457653522, 0.011535019613802433, 0.010923968628048897, -0.01971481367945671, -0.046914029866456985, 0.01807601936161518, 0.10221728682518005, 0.10884364694356918, 0.2265089452266693, -0.030411601066589355, 0.037366315722465515, -0.13553443551063538, -0.06922364234924316, -0.0007353759719990194, -0.06584207713603973, 0.05720983445644379, -0.05033816024661064, 0.05898189917206764, 0.10838045179843903, -0.13478687405586243, 0.13172444701194763, -0.04291270673274994, -0.02547813579440117, 
-0.055576980113983154, -0.19281385838985443, -0.03693368658423424, 0.0208734180778265, 0.002703124424442649, -0.08826450258493423, 0.10647476464509964, 0.1286775916814804, 0.015164603479206562, -0.008228341117501259, 0.06799691170454025, -0.09710061550140381, -0.05787532404065132, -0.031073324382305145, 0.030328501015901566, 0.03725384920835495, 0.01640118472278118, 0.059563569724559784, -0.007266880478709936, 0.052985675632953644, 0.07558246701955795, 0.0955083966255188, 0.06395645439624786, 0.04452987387776375, -0.03257518634200096, -0.03968902677297592, 0.0016989983851090074, -0.018106041476130486, -0.05833239108324051, 0.17827758193016052, 0.06401778757572174, 0.027681628242135048, 0.021881742402911186, 0.1949464976787567, -0.01288588996976614, -0.07230787724256516, -0.13775275647640228, 0.172287255525589, -0.003988269716501236, 0.027280420064926147, 0.02519804798066616, -0.11693165451288223, 0.002837229985743761, 0.14666897058486938, 0.09646455943584442, 0.016375072300434113, 0.013616083189845085, 0.03961312398314476, 0.023808678612113, -0.0260773915797472, 0.029848892241716385, 0.038839925080537796, 0.22083160281181335, -0.051652807742357254, 0.07740507274866104, -0.03901735320687294, -0.017315169796347618, -0.03590565547347069, 0.11624065041542053, -0.05247374624013901, 0.017562847584486008, -0.07083559781312943, 0.07063990831375122, -0.07722398638725281, -0.2273941934108734, 0.019465211778879166, -0.0573916956782341, -0.13405773043632507, -0.0035151091869920492, 0.03419412672519684, -0.02678229846060276, 0.04243074730038643, 0.039014361798763275, -0.032206788659095764, 0.1823575794696808, 0.0185083020478487, -0.06035692244768143, -0.08408155292272568, 0.06501811742782593, -0.04448259249329567, 0.2811453938484192, 0.004264957271516323, 0.02296925149857998, 0.08289425820112228, -0.0036470748018473387, -0.12932364642620087, 0.052606984972953796, 0.09382060915231705, -0.0676756203174591, 0.033378083258867264, 0.12150000780820847, -0.013348307460546494, 
0.12942317128181458, 0.049995534121990204, 0.012172493152320385, 0.07427003979682922, 0.048730090260505676, 0.024426555261015892, -0.08221407979726791, 0.05080859735608101, -0.0806843712925911, 0.11762189120054245, 0.13480745255947113, -0.010208634659647942, 0.012670524418354034, -0.05522403493523598, 0.051687125116586685, -0.049612097442150116, 0.09446295350790024, -0.014513620175421238, -0.13062620162963867, 0.06079098582267761, 0.02298375591635704, 0.07890716195106506, -0.1888921707868576, -0.08001383394002914, 0.09509116411209106, -0.05740761384367943, -0.016627570614218712, 0.09643804281949997, 0.029533620923757553, 0.02535375766456127, -0.049182549118995667, -0.11769071966409683, 0.029976336285471916, 0.10136672109365463, -0.06521677225828171, -0.04397553950548172 ]
088cfdf9d50dd7e0072da691f3264f41033fdf06
# Dataset Card for Evaluation run of 22h/open-cabrita3b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [22h/open-cabrita3b](https://huggingface.co/22h/open-cabrita3b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_22h__open-cabrita3b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T04:42:07.614835](https://huggingface.co/datasets/open-llm-leaderboard/details_22h__open-cabrita3b/blob/main/results_2024-01-25T04-42-07.614835.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.25694742738720877, "acc_stderr": 0.030910338030416252, "acc_norm": 0.2582241838050863, "acc_norm_stderr": 0.03168470128261473, "mc1": 0.2484700122399021, "mc1_stderr": 0.015127427096520681, "mc2": 0.38499408711109195, "mc2_stderr": 0.014009970143005233 }, "harness|arc:challenge|25": { "acc": 0.30631399317406144, "acc_stderr": 0.01347058441727651, "acc_norm": 0.3378839590443686, "acc_norm_stderr": 0.013822047922283514 }, "harness|hellaswag|10": { "acc": 0.41943835889265085, "acc_stderr": 0.004924586362301657, "acc_norm": 0.5534754033061143, "acc_norm_stderr": 0.004961161589228395 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.21481481481481482, "acc_stderr": 0.03547854198560826, "acc_norm": 0.21481481481481482, "acc_norm_stderr": 0.03547854198560826 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.20394736842105263, "acc_stderr": 0.032790004063100515, "acc_norm": 0.20394736842105263, "acc_norm_stderr": 0.032790004063100515 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.22641509433962265, "acc_stderr": 0.025757559893106744, "acc_norm": 0.22641509433962265, "acc_norm_stderr": 0.025757559893106744 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.24305555555555555, "acc_stderr": 0.03586879280080341, "acc_norm": 0.24305555555555555, "acc_norm_stderr": 0.03586879280080341 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.16, "acc_stderr": 0.03684529491774709, "acc_norm": 0.16, "acc_norm_stderr": 0.03684529491774709 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, 
"acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.21965317919075145, "acc_stderr": 0.031568093627031744, "acc_norm": 0.21965317919075145, "acc_norm_stderr": 0.031568093627031744 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.04220773659171453, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171453 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3148936170212766, "acc_stderr": 0.030363582197238167, "acc_norm": 0.3148936170212766, "acc_norm_stderr": 0.030363582197238167 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.03999423879281337, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.03999423879281337 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.22758620689655173, "acc_stderr": 0.03493950380131184, "acc_norm": 0.22758620689655173, "acc_norm_stderr": 0.03493950380131184 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2751322751322751, "acc_stderr": 0.02300008685906864, "acc_norm": 0.2751322751322751, "acc_norm_stderr": 0.02300008685906864 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.25396825396825395, "acc_stderr": 0.03893259610604671, "acc_norm": 0.25396825396825395, "acc_norm_stderr": 0.03893259610604671 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1870967741935484, "acc_stderr": 0.02218571009225225, "acc_norm": 0.1870967741935484, "acc_norm_stderr": 0.02218571009225225 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.1921182266009852, "acc_stderr": 0.027719315709614775, "acc_norm": 0.1921182266009852, "acc_norm_stderr": 0.027719315709614775 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.26, "acc_stderr": 0.04408440022768079, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.20707070707070707, "acc_stderr": 0.02886977846026705, "acc_norm": 0.20707070707070707, "acc_norm_stderr": 0.02886977846026705 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.21243523316062177, "acc_stderr": 0.029519282616817244, "acc_norm": 0.21243523316062177, "acc_norm_stderr": 0.029519282616817244 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.24871794871794872, "acc_stderr": 0.0219169577092138, "acc_norm": 0.24871794871794872, "acc_norm_stderr": 0.0219169577092138 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2740740740740741, "acc_stderr": 0.027195934804085622, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085622 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2251655629139073, "acc_stderr": 0.034104352820089376, "acc_norm": 0.2251655629139073, "acc_norm_stderr": 0.034104352820089376 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.22568807339449543, "acc_stderr": 0.017923087667803053, "acc_norm": 0.22568807339449543, "acc_norm_stderr": 0.017923087667803053 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.22685185185185186, "acc_stderr": 
0.028561650102422276, "acc_norm": 0.22685185185185186, "acc_norm_stderr": 0.028561650102422276 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.2549019607843137, "acc_stderr": 0.030587591351604243, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.030587591351604243 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2616033755274262, "acc_stderr": 0.028609516716994934, "acc_norm": 0.2616033755274262, "acc_norm_stderr": 0.028609516716994934 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.35874439461883406, "acc_stderr": 0.032190792004199956, "acc_norm": 0.35874439461883406, "acc_norm_stderr": 0.032190792004199956 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2900763358778626, "acc_stderr": 0.03980066246467765, "acc_norm": 0.2900763358778626, "acc_norm_stderr": 0.03980066246467765 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.3148148148148148, "acc_stderr": 0.04489931073591312, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.04489931073591312 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.18404907975460122, "acc_stderr": 0.030446777687971743, "acc_norm": 0.18404907975460122, "acc_norm_stderr": 0.030446777687971743 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.32142857142857145, "acc_stderr": 0.0443280405529152, "acc_norm": 0.32142857142857145, "acc_norm_stderr": 0.0443280405529152 }, "harness|hendrycksTest-management|5": { "acc": 0.2912621359223301, "acc_stderr": 0.04498676320572922, "acc_norm": 0.2912621359223301, "acc_norm_stderr": 0.04498676320572922 }, "harness|hendrycksTest-marketing|5": { "acc": 0.28205128205128205, "acc_stderr": 0.02948036054954119, "acc_norm": 0.28205128205128205, "acc_norm_stderr": 0.02948036054954119 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.29, "acc_stderr": 
0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2656449553001277, "acc_stderr": 0.01579430248788872, "acc_norm": 0.2656449553001277, "acc_norm_stderr": 0.01579430248788872 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.23699421965317918, "acc_stderr": 0.02289408248992599, "acc_norm": 0.23699421965317918, "acc_norm_stderr": 0.02289408248992599 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23575418994413408, "acc_stderr": 0.014196375686290804, "acc_norm": 0.23575418994413408, "acc_norm_stderr": 0.014196375686290804 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.238562091503268, "acc_stderr": 0.024404394928087873, "acc_norm": 0.238562091503268, "acc_norm_stderr": 0.024404394928087873 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2572347266881029, "acc_stderr": 0.024826171289250888, "acc_norm": 0.2572347266881029, "acc_norm_stderr": 0.024826171289250888 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2345679012345679, "acc_stderr": 0.023576881744005712, "acc_norm": 0.2345679012345679, "acc_norm_stderr": 0.023576881744005712 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.24468085106382978, "acc_stderr": 0.02564555362226673, "acc_norm": 0.24468085106382978, "acc_norm_stderr": 0.02564555362226673 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.23859191655801826, "acc_stderr": 0.010885929742002202, "acc_norm": 0.23859191655801826, "acc_norm_stderr": 0.010885929742002202 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.1948529411764706, "acc_stderr": 0.024060599423487424, "acc_norm": 0.1948529411764706, "acc_norm_stderr": 0.024060599423487424 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.24836601307189543, "acc_stderr": 0.017479487001364764, "acc_norm": 0.24836601307189543, "acc_norm_stderr": 0.017479487001364764 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.24545454545454545, 
"acc_stderr": 0.04122066502878285, "acc_norm": 0.24545454545454545, "acc_norm_stderr": 0.04122066502878285 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.2, "acc_stderr": 0.025607375986579153, "acc_norm": 0.2, "acc_norm_stderr": 0.025607375986579153 }, "harness|hendrycksTest-sociology|5": { "acc": 0.26865671641791045, "acc_stderr": 0.03134328358208954, "acc_norm": 0.26865671641791045, "acc_norm_stderr": 0.03134328358208954 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-virology|5": { "acc": 0.29518072289156627, "acc_stderr": 0.035509201856896294, "acc_norm": 0.29518072289156627, "acc_norm_stderr": 0.035509201856896294 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 0.2484700122399021, "mc1_stderr": 0.015127427096520681, "mc2": 0.38499408711109195, "mc2_stderr": 0.014009970143005233 }, "harness|winogrande|5": { "acc": 0.5943172849250198, "acc_stderr": 0.013800206336014205 }, "harness|gsm8k|5": { "acc": 0.009855951478392721, "acc_stderr": 0.002721076577041661 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_22h__open-cabrita3b
[ "region:us" ]
2024-01-25T04:43:51+00:00
{"pretty_name": "Evaluation run of 22h/open-cabrita3b", "dataset_summary": "Dataset automatically created during the evaluation run of model [22h/open-cabrita3b](https://huggingface.co/22h/open-cabrita3b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_22h__open-cabrita3b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T04:42:07.614835](https://huggingface.co/datasets/open-llm-leaderboard/details_22h__open-cabrita3b/blob/main/results_2024-01-25T04-42-07.614835.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.25694742738720877,\n \"acc_stderr\": 0.030910338030416252,\n \"acc_norm\": 0.2582241838050863,\n \"acc_norm_stderr\": 0.03168470128261473,\n \"mc1\": 0.2484700122399021,\n \"mc1_stderr\": 0.015127427096520681,\n \"mc2\": 0.38499408711109195,\n \"mc2_stderr\": 0.014009970143005233\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.30631399317406144,\n \"acc_stderr\": 0.01347058441727651,\n \"acc_norm\": 0.3378839590443686,\n \"acc_norm_stderr\": 0.013822047922283514\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.41943835889265085,\n \"acc_stderr\": 0.004924586362301657,\n \"acc_norm\": 0.5534754033061143,\n \"acc_norm_stderr\": 0.004961161589228395\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.21481481481481482,\n \"acc_stderr\": 0.03547854198560826,\n \"acc_norm\": 0.21481481481481482,\n \"acc_norm_stderr\": 0.03547854198560826\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.20394736842105263,\n \"acc_stderr\": 0.032790004063100515,\n \"acc_norm\": 0.20394736842105263,\n \"acc_norm_stderr\": 0.032790004063100515\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.22641509433962265,\n \"acc_stderr\": 0.025757559893106744,\n \"acc_norm\": 0.22641509433962265,\n \"acc_norm_stderr\": 0.025757559893106744\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.24305555555555555,\n \"acc_stderr\": 0.03586879280080341,\n \"acc_norm\": 0.24305555555555555,\n \"acc_norm_stderr\": 0.03586879280080341\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n 
\"acc\": 0.16,\n \"acc_stderr\": 0.03684529491774709,\n \"acc_norm\": 0.16,\n \"acc_norm_stderr\": 0.03684529491774709\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.21965317919075145,\n \"acc_stderr\": 0.031568093627031744,\n \"acc_norm\": 0.21965317919075145,\n \"acc_norm_stderr\": 0.031568093627031744\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.04220773659171453,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.04220773659171453\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.3148936170212766,\n \"acc_stderr\": 0.030363582197238167,\n \"acc_norm\": 0.3148936170212766,\n \"acc_norm_stderr\": 0.030363582197238167\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.03999423879281337,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.03999423879281337\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.22758620689655173,\n \"acc_stderr\": 0.03493950380131184,\n \"acc_norm\": 0.22758620689655173,\n \"acc_norm_stderr\": 0.03493950380131184\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2751322751322751,\n \"acc_stderr\": 0.02300008685906864,\n \"acc_norm\": 0.2751322751322751,\n \"acc_norm_stderr\": 0.02300008685906864\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.25396825396825395,\n 
\"acc_stderr\": 0.03893259610604671,\n \"acc_norm\": 0.25396825396825395,\n \"acc_norm_stderr\": 0.03893259610604671\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.1870967741935484,\n \"acc_stderr\": 0.02218571009225225,\n \"acc_norm\": 0.1870967741935484,\n \"acc_norm_stderr\": 0.02218571009225225\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.1921182266009852,\n \"acc_stderr\": 0.027719315709614775,\n \"acc_norm\": 0.1921182266009852,\n \"acc_norm_stderr\": 0.027719315709614775\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.20707070707070707,\n \"acc_stderr\": 0.02886977846026705,\n \"acc_norm\": 0.20707070707070707,\n \"acc_norm_stderr\": 0.02886977846026705\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.21243523316062177,\n \"acc_stderr\": 0.029519282616817244,\n \"acc_norm\": 0.21243523316062177,\n \"acc_norm_stderr\": 0.029519282616817244\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.24871794871794872,\n \"acc_stderr\": 0.0219169577092138,\n \"acc_norm\": 0.24871794871794872,\n \"acc_norm_stderr\": 0.0219169577092138\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2740740740740741,\n \"acc_stderr\": 0.027195934804085622,\n \"acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.027195934804085622\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2251655629139073,\n \"acc_stderr\": 0.034104352820089376,\n \"acc_norm\": 0.2251655629139073,\n \"acc_norm_stderr\": 0.034104352820089376\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.22568807339449543,\n \"acc_stderr\": 0.017923087667803053,\n \"acc_norm\": 0.22568807339449543,\n \"acc_norm_stderr\": 0.017923087667803053\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.22685185185185186,\n \"acc_stderr\": 0.028561650102422276,\n \"acc_norm\": 0.22685185185185186,\n \"acc_norm_stderr\": 0.028561650102422276\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.030587591351604243,\n \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.030587591351604243\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.2616033755274262,\n \"acc_stderr\": 0.028609516716994934,\n \"acc_norm\": 0.2616033755274262,\n \"acc_norm_stderr\": 0.028609516716994934\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.35874439461883406,\n \"acc_stderr\": 0.032190792004199956,\n \"acc_norm\": 0.35874439461883406,\n \"acc_norm_stderr\": 0.032190792004199956\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2900763358778626,\n \"acc_stderr\": 0.03980066246467765,\n \"acc_norm\": 0.2900763358778626,\n \"acc_norm_stderr\": 0.03980066246467765\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.3148148148148148,\n \"acc_stderr\": 
0.04489931073591312,\n \"acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.04489931073591312\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.18404907975460122,\n \"acc_stderr\": 0.030446777687971743,\n \"acc_norm\": 0.18404907975460122,\n \"acc_norm_stderr\": 0.030446777687971743\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.32142857142857145,\n \"acc_stderr\": 0.0443280405529152,\n \"acc_norm\": 0.32142857142857145,\n \"acc_norm_stderr\": 0.0443280405529152\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.2912621359223301,\n \"acc_stderr\": 0.04498676320572922,\n \"acc_norm\": 0.2912621359223301,\n \"acc_norm_stderr\": 0.04498676320572922\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.28205128205128205,\n \"acc_stderr\": 0.02948036054954119,\n \"acc_norm\": 0.28205128205128205,\n \"acc_norm_stderr\": 0.02948036054954119\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2656449553001277,\n \"acc_stderr\": 0.01579430248788872,\n \"acc_norm\": 0.2656449553001277,\n \"acc_norm_stderr\": 0.01579430248788872\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.23699421965317918,\n \"acc_stderr\": 0.02289408248992599,\n \"acc_norm\": 0.23699421965317918,\n \"acc_norm_stderr\": 0.02289408248992599\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23575418994413408,\n \"acc_stderr\": 0.014196375686290804,\n \"acc_norm\": 0.23575418994413408,\n \"acc_norm_stderr\": 0.014196375686290804\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.238562091503268,\n \"acc_stderr\": 0.024404394928087873,\n \"acc_norm\": 0.238562091503268,\n \"acc_norm_stderr\": 0.024404394928087873\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2572347266881029,\n \"acc_stderr\": 
0.024826171289250888,\n \"acc_norm\": 0.2572347266881029,\n \"acc_norm_stderr\": 0.024826171289250888\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.2345679012345679,\n \"acc_stderr\": 0.023576881744005712,\n \"acc_norm\": 0.2345679012345679,\n \"acc_norm_stderr\": 0.023576881744005712\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.24468085106382978,\n \"acc_stderr\": 0.02564555362226673,\n \"acc_norm\": 0.24468085106382978,\n \"acc_norm_stderr\": 0.02564555362226673\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.23859191655801826,\n \"acc_stderr\": 0.010885929742002202,\n \"acc_norm\": 0.23859191655801826,\n \"acc_norm_stderr\": 0.010885929742002202\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.1948529411764706,\n \"acc_stderr\": 0.024060599423487424,\n \"acc_norm\": 0.1948529411764706,\n \"acc_norm_stderr\": 0.024060599423487424\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.24836601307189543,\n \"acc_stderr\": 0.017479487001364764,\n \"acc_norm\": 0.24836601307189543,\n \"acc_norm_stderr\": 0.017479487001364764\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.24545454545454545,\n \"acc_stderr\": 0.04122066502878285,\n \"acc_norm\": 0.24545454545454545,\n \"acc_norm_stderr\": 0.04122066502878285\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.025607375986579153,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.025607375986579153\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.26865671641791045,\n \"acc_stderr\": 0.03134328358208954,\n \"acc_norm\": 0.26865671641791045,\n \"acc_norm_stderr\": 0.03134328358208954\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.29518072289156627,\n 
\"acc_stderr\": 0.035509201856896294,\n \"acc_norm\": 0.29518072289156627,\n \"acc_norm_stderr\": 0.035509201856896294\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n \"acc_stderr\": 0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2484700122399021,\n \"mc1_stderr\": 0.015127427096520681,\n \"mc2\": 0.38499408711109195,\n \"mc2_stderr\": 0.014009970143005233\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5943172849250198,\n \"acc_stderr\": 0.013800206336014205\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.009855951478392721,\n \"acc_stderr\": 0.002721076577041661\n }\n}\n```", "repo_url": "https://huggingface.co/22h/open-cabrita3b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|arc:challenge|25_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|gsm8k|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hellaswag|10_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T04-42-07.614835.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T04-42-07.614835.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T04-42-07.614835.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T04-42-07.614835.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T04-42-07.614835.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T04-42-07.614835.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T04-42-07.614835.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": 
"2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": 
"2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T04-42-07.614835.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["**/details_harness|winogrande|5_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T04-42-07.614835.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T04_42_07.614835", "path": ["results_2024-01-25T04-42-07.614835.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T04-42-07.614835.parquet"]}]}]}
2024-01-25T04:44:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of 22h/open-cabrita3b Dataset automatically created during the evaluation run of model 22h/open-cabrita3b on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T04:42:07.614835(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of 22h/open-cabrita3b\n\n\n\nDataset automatically created during the evaluation run of model 22h/open-cabrita3b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T04:42:07.614835(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of 22h/open-cabrita3b\n\n\n\nDataset automatically created during the evaluation run of model 22h/open-cabrita3b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T04:42:07.614835(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 181, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of 22h/open-cabrita3b\n\n\n\nDataset automatically created during the evaluation run of model 22h/open-cabrita3b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T04:42:07.614835(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.05176185443997383, 0.20264407992362976, -0.005735395476222038, 0.04868210479617119, 0.06989095360040665, -0.017150312662124634, 0.05177357792854309, 0.10947511345148087, 0.02444494515657425, 0.18947206437587738, -0.016457701101899147, 0.1042962521314621, 0.07954555749893188, 0.11868730187416077, 0.02589266002178192, -0.1307915598154068, 0.032499223947525024, -0.09378909319639206, 0.1220109686255455, 0.06481719762086868, 0.05337778851389885, -0.07900047302246094, 0.06700406223535538, -0.02882792055606842, 0.04667925834655762, -0.006674845702946186, -0.06617088615894318, -0.02672273851931095, 0.11082713305950165, 0.10730455070734024, 0.036253299564123154, -0.0156162790954113, 0.029801243916153908, -0.26623737812042236, 0.018973765894770622, 0.09768791496753693, -0.015769831836223602, 0.04339832440018654, 0.14946478605270386, -0.08841031789779663, 0.08496031910181046, -0.029397856444120407, 0.06672067940235138, 0.053847309201955795, -0.10625524818897247, -0.15631672739982605, -0.15765631198883057, 0.001115655293688178, 0.0636008232831955, 0.045213937759399414, -0.022528663277626038, 0.13625070452690125, -0.05177941173315048, 0.05091797560453415, 0.1471390575170517, -0.14316488802433014, -0.017834192141890526, 0.042538970708847046, 0.01776546984910965, 0.08601560443639755, -0.07453085482120514, -0.028522878885269165, 0.03305575251579285, 0.0585213303565979, -0.006987619213759899, 0.014426756650209427, 0.009019453078508377, 0.009767798706889153, -0.14774562418460846, -0.12743018567562103, 0.11380116641521454, 0.004533958621323109, -0.05075056478381157, -0.1702517718076706, -0.038032907992601395, 0.01252299826592207, 0.0004340109881013632, 0.027760136872529984, -0.0010237203678116202, -0.011312569491565228, 0.09252524375915527, -0.010321053676307201, -0.09395615756511688, -0.0319000743329525, -0.018024003133177757, 0.045512598007917404, 0.03112231194972992, 0.002957768039777875, 0.008927633054554462, 0.11837606877088547, 0.026374738663434982, -0.047727685421705246, 
-0.07662753015756607, -0.048731960356235504, -0.1064373031258583, -0.03003072552382946, 0.015048118308186531, -0.07702131569385529, 0.04678701236844063, 0.2214389592409134, -0.024016620591282845, 0.028832657262682915, -0.11798695474863052, 0.022673489525914192, 0.11918867379426956, 0.0559917613863945, -0.08761607110500336, -0.055805645883083344, -0.03574458882212639, 0.020952897146344185, 0.03153909742832184, -0.01754811219871044, 0.007473047357052565, 0.073996402323246, 0.013398987241089344, 0.12676091492176056, 0.11994708329439163, 0.023168183863162994, -0.06719815731048584, -0.01618514209985733, 0.20273225009441376, -0.151951864361763, -0.006877620238810778, 0.0286091398447752, -0.03549851104617119, -0.10367962718009949, 0.060478195548057556, -0.014582809060811996, -0.06460488587617874, 0.11670324951410294, -0.04435022920370102, -0.07603935897350311, -0.08819135278463364, -0.07787933945655823, 0.04214727133512497, -0.014719560742378235, -0.06348590552806854, -0.06826508790254593, -0.11193320155143738, -0.08374299854040146, 0.031345609575510025, -0.07008088380098343, -0.004652924835681915, 0.008791335858404636, 0.011400223709642887, -0.008818953298032284, -0.01566466875374317, 0.11530090123414993, -0.07349833846092224, 0.03367539122700691, -0.044485483318567276, 0.028872210532426834, 0.09367238730192184, 0.029029566794633865, -0.10666818171739578, 0.08538620918989182, -0.09641405940055847, 0.1013331264257431, -0.11109896004199982, -0.032626282423734665, -0.12015020847320557, 0.003303833771497011, -0.02206202782690525, 0.04391546547412872, -0.024236520752310753, 0.08482985943555832, -0.20589441061019897, -0.0023235012777149677, 0.1950569897890091, -0.1131538450717926, -0.0660114660859108, 0.09511101990938187, -0.03962567076086998, 0.04840882495045662, 0.037103019654750824, 0.08765123039484024, 0.10767707973718643, -0.08236420899629593, -0.09776519238948822, -0.043646588921546936, -0.029898453503847122, 0.1405477672815323, 0.06681413948535919, -0.09379320591688156, 
0.0974390059709549, 0.03311661258339882, 0.012938354164361954, -0.06656447798013687, -0.006151038222014904, -0.05636613070964813, -0.014696874655783176, -0.05594753101468086, -0.07619889080524445, -0.019092250615358353, -0.07483937591314316, -0.014841645956039429, -0.05921410396695137, -0.0031074644066393375, 0.10374143719673157, -0.02648155204951763, 0.024635668843984604, -0.08381103724241257, 0.05313799902796745, -0.0022233775816857815, 0.014015291817486286, -0.21387979388237, -0.07223451137542725, 0.03298485279083252, -0.19788789749145508, 0.04796431586146355, 0.03266959637403488, 0.01769772544503212, 0.06464994698762894, -0.0017269214149564505, 0.019673999398946762, 0.03769775852560997, -0.017340412363409996, -0.01249021477997303, -0.15672044456005096, -0.04690523073077202, -0.08584261685609818, 0.07559693604707718, -0.1119060292840004, -0.0188190545886755, 0.06505534797906876, 0.1451876312494278, 0.02102239616215229, -0.0733531266450882, 0.0492461659014225, 0.011192724108695984, -0.04710007458925247, -0.0541808158159256, -0.0002535126986913383, -0.022103581577539444, 0.03815024718642235, 0.060544684529304504, -0.17931652069091797, -0.10490281879901886, 0.07254344969987869, 0.13940827548503876, -0.05699257180094719, -0.07785467058420181, -0.07132377475500107, -0.056676872074604034, -0.09466228634119034, -0.0674048587679863, 0.08332907408475876, 0.0933849886059761, 0.05714007094502449, -0.07873527705669403, -0.0554271899163723, 0.00636791717261076, 0.04824254661798477, -0.06806576251983643, 0.11614059656858444, 0.08526366949081421, -0.08943958580493927, 0.10805875808000565, -0.05145652964711189, 0.09828847646713257, 0.09538590162992477, 0.023205608129501343, -0.11249160766601562, 0.006039941217750311, 0.07349886000156403, 0.05507143959403038, 0.0725301057100296, -0.009910110384225845, 0.038866233080625534, 0.08680229634046555, -0.00269236508756876, 0.044596537947654724, -0.07418927550315857, 0.030468106269836426, 0.026605956256389618, -0.0018119604792445898, 
0.02169135771691799, 0.006776049267500639, 0.025324471294879913, 0.10041934996843338, 0.0207294300198555, 0.07626783102750778, -0.03547387570142746, -0.058891378343105316, -0.104091115295887, 0.1451156735420227, -0.09196692705154419, -0.24018177390098572, -0.17427018284797668, -0.043846696615219116, -0.03283603489398956, -0.00898384302854538, 0.059157051146030426, 0.0028210394084453583, -0.10650639235973358, -0.12764166295528412, 0.04300016537308693, 0.036950644105672836, -0.1303137242794037, -0.05346287786960602, 0.04135171324014664, -0.01246466115117073, -0.17045153677463531, 0.036157503724098206, 0.04827991500496864, -0.06333751976490021, 0.011389675550162792, 0.0671142041683197, 0.10552442073822021, 0.09518609195947647, 0.08783162385225296, -0.023740176111459732, -0.01733555644750595, 0.15610463917255402, -0.10320954024791718, 0.02644890546798706, 0.0886082798242569, -0.038582514971494675, 0.07810958474874496, 0.14041058719158173, 0.008589514531195164, -0.08548632264137268, 0.05402567982673645, 0.10446391999721527, -0.06442518532276154, -0.25433066487312317, -0.10556641221046448, -0.03017572872340679, 0.04891793802380562, 0.10592151433229446, 0.07292795181274414, 0.0032560552936047316, 0.005193971563130617, -0.12225097417831421, -0.02744624949991703, -0.03651978075504303, 0.060849811881780624, 0.02544882893562317, -0.010988197289407253, 0.04435000196099281, -0.05119653418660164, 0.022787589579820633, 0.134547159075737, 0.035787977278232574, 0.16099600493907928, -0.043039239943027496, 0.19645148515701294, 0.09360124915838242, 0.07449228316545486, -0.03604504093527794, 0.059715088456869125, -0.024157347157597542, 0.07489611953496933, -0.02188187465071678, -0.10130883753299713, -0.03659065067768097, 0.10173992067575455, 0.03971150889992714, -0.05751711130142212, 0.05111256241798401, -0.07656991481781006, 0.047675441950559616, 0.24554681777954102, -0.014518724754452705, -0.11976694315671921, -0.04556659236550331, 0.06048069894313812, -0.04817728325724602, 
-0.09433839470148087, 0.0031865190248936415, 0.09853744506835938, -0.14902016520500183, 0.0024650837294757366, -0.04145415872335434, 0.07704123109579086, -0.13714314997196198, -0.03199562430381775, -0.03895336017012596, 0.039406463503837585, -0.01614673063158989, 0.10775861144065857, -0.1432943195104599, 0.09463215619325638, -0.007835719734430313, 0.009889273904263973, -0.08574661612510681, 0.0611502043902874, -0.021188296377658844, -0.06419865041971207, 0.13716033101081848, -0.005305818282067776, -0.09370579570531845, -0.05079728364944458, -0.1195291057229042, -0.014277822338044643, 0.05200154706835747, -0.10694972425699234, 0.10850579291582108, 0.021143397316336632, -0.029476214200258255, -0.04137727618217468, -0.0169083159416914, -0.08425208181142807, -0.234107106924057, 0.09698697179555893, -0.1358310580253601, 0.0316435806453228, -0.06354623287916183, -0.04490114003419876, -0.05074393376708031, 0.123944491147995, -0.12569504976272583, -0.058434441685676575, -0.1096506416797638, -0.03337668254971504, 0.15978184342384338, -0.05703790485858917, 0.062048666179180145, -0.04539703205227852, 0.17081300914287567, -0.0404142290353775, -0.0527556873857975, 0.0076405503787100315, -0.0861077532172203, -0.18919314444065094, -0.05331939831376076, 0.10975312441587448, 0.06879164278507233, 0.018385935574769974, -0.014188004657626152, 0.030466066673398018, 0.014589381404221058, -0.094752237200737, 0.03433119133114815, 0.12549802660942078, 0.13642050325870514, 0.05161985009908676, -0.02577657252550125, -0.10239700227975845, -0.09405016899108887, -0.10753597319126129, 0.058784160763025284, 0.18272383511066437, -0.06221950426697731, 0.15498115122318268, 0.14306509494781494, -0.10220545530319214, -0.2019246518611908, -0.07563219219446182, -0.002983513055369258, -0.021721499040722847, 0.10761217027902603, -0.20021292567253113, 0.04394211620092392, 0.0774238333106041, -0.03235157951712608, 0.11590258777141571, -0.28183403611183167, -0.13818222284317017, 0.042535051703453064, 
0.05377570539712906, -0.19342121481895447, -0.16143931448459625, -0.09996029734611511, -0.026048218831419945, -0.13651764392852783, 0.1252223253250122, -0.02188427746295929, 0.03509695082902908, -0.022522319108247757, 0.06539637595415115, 0.04213797673583031, -0.07306855171918869, 0.12609973549842834, -0.028968332335352898, 0.031643468886613846, -0.09877868741750717, -0.018385352566838264, -0.030568590387701988, -0.045136842876672745, 0.07905817031860352, 0.0026046852581202984, 0.049908436834812164, -0.09300048649311066, -0.031169859692454338, -0.05923277139663696, 0.03751019015908241, -0.06532420217990875, -0.05631422623991966, -0.07182179391384125, 0.07961324602365494, 0.0847906693816185, -0.006131537724286318, 0.04345916956663132, -0.04965074360370636, 0.032849278301000595, 0.219259575009346, 0.09173716604709625, 0.05184754356741905, -0.08927127718925476, -0.04695883393287659, -0.016018101945519447, 0.0013729138299822807, -0.10032035410404205, 0.04439324513077736, 0.08294844627380371, 0.037892308086156845, 0.10181828588247299, -0.019123202189803123, -0.19063323736190796, 0.005192701239138842, 0.07431556284427643, -0.09967005997896194, -0.20194576680660248, 0.049976643174886703, 0.12576259672641754, -0.12347127497196198, -0.07756069302558899, 0.08669833838939667, 0.024250520393252373, -0.030014147982001305, -0.0019175764173269272, 0.0738494023680687, 0.04504355043172836, 0.08428952097892761, 0.000439472816651687, 0.04438655078411102, -0.06617645919322968, 0.09848379343748093, 0.14031074941158295, -0.10895906388759613, 0.010579644702374935, 0.05370543897151947, -0.046301521360874176, -0.06973213702440262, 0.0016603545518592, 0.037993982434272766, 0.013317174278199673, -0.03705764189362526, 0.008802883327007294, -0.044691357761621475, 0.0758621096611023, 0.16978032886981964, -0.012238793075084686, 0.058168116956949234, 0.024895833805203438, -0.0011895527131855488, -0.051771339029073715, 0.10170336067676544, 0.017141394317150116, 0.04321927949786186, 
-0.016141146421432495, 0.03143591806292534, 0.015076816082000732, -0.027309229597449303, 0.020317981019616127, -0.05168467015028, -0.07102058827877045, 0.007537515833973885, -0.19942206144332886, 0.060930248349905014, -0.07509810477495193, -0.002846325049176812, -0.0034936261363327503, -0.0016626082360744476, 0.0028912124689668417, 0.005947320256382227, -0.07226952910423279, -0.04319450259208679, -0.04788323491811752, 0.13058148324489594, -0.20008540153503418, -0.0007550346781499684, 0.08147288858890533, -0.07205290347337723, 0.06940721720457077, -0.0022450191900134087, -0.014578337781131268, 0.02303488925099373, -0.09222167730331421, 0.00099813228007406, -0.03192443028092384, 0.06296691298484802, 0.01741798035800457, -0.12306258827447891, -0.012278638780117035, -0.005485190078616142, -0.08583331108093262, -0.004777988418936729, 0.029952680692076683, -0.14377662539482117, 0.0814807265996933, 0.08713938295841217, -0.043262556195259094, -0.04347888007760048, 0.04179573804140091, 0.03982313349843025, 0.003717729588970542, 0.08798719942569733, -0.01996154524385929, 0.030792001634836197, -0.151060089468956, -0.0386885404586792, 0.012840594165027142, 0.007435329258441925, 0.03950626030564308, 0.007598217576742172, 0.022868700325489044, -0.002753558335825801, 0.23790252208709717, -0.014738867059350014, 0.010386156849563122, 0.020194686949253082, -0.01916125789284706, -0.03586637228727341, 0.03617390617728233, -0.007804078981280327, -0.006488542538136244, 0.027595479041337967, 0.004942324012517929, -0.034999266266822815, -0.057516664266586304, -0.0050912536680698395, 0.08614750951528549, 0.1287299543619156, 0.19181036949157715, -0.02786148339509964, 0.05648878961801529, -0.16452467441558838, -0.043684788048267365, 0.0011108685284852982, -0.05404604598879814, 0.053675923496484756, -0.0697702094912529, 0.0699474960565567, 0.09490688145160675, -0.1072111502289772, 0.14991770684719086, -0.048633165657520294, -0.01689682900905609, -0.0456545315682888, -0.16650129854679108, 
-0.03605106845498085, 0.03581364452838898, 0.0009341235854662955, -0.08955834060907364, 0.11533018201589584, 0.12024765461683273, 0.008094209246337414, 0.0028624930419027805, 0.07248082756996155, -0.07983149588108063, -0.04913291707634926, -0.032757874578237534, 0.010630869306623936, 0.021145975217223167, 0.007361171767115593, 0.0569557286798954, 0.01292896643280983, 0.04263339564204216, 0.07071142643690109, 0.10482694953680038, 0.04428587108850479, 0.02638576738536358, -0.03445500135421753, -0.048416171222925186, 0.0009718852816149592, -0.02789372019469738, -0.0625130832195282, 0.2070428729057312, 0.06314150243997574, 0.018373191356658936, 0.023230282589793205, 0.20254094898700714, -0.014370246790349483, -0.0537908710539341, -0.12450672686100006, 0.1603395640850067, -0.010210382752120495, 0.031215636059641838, 0.03248799592256546, -0.10959890484809875, 0.0094589339569211, 0.16043326258659363, 0.1067168191075325, 0.02169453166425228, 0.01402650773525238, 0.04960254579782486, 0.02303287945687771, -0.025390293449163437, 0.04993846267461777, 0.038966815918684006, 0.2437981218099594, -0.056037623435258865, 0.08060086518526077, -0.011805295944213867, 0.006380853708833456, -0.028157763183116913, 0.11412154138088226, -0.05872545391321182, 0.021032439544796944, -0.0649547204375267, 0.06875669211149216, -0.07138130068778992, -0.25150322914123535, -0.008350590243935585, -0.06381800025701523, -0.13249386847019196, -0.00700802868232131, 0.014695237390697002, -0.022138431668281555, 0.04290592670440674, 0.03799315169453621, -0.02334165759384632, 0.1829129457473755, 0.0030046694446355104, -0.07522251456975937, -0.08275803923606873, 0.054288655519485474, -0.06969735026359558, 0.2836058735847473, 0.0005234009004198015, 0.04650139808654785, 0.08845779299736023, -0.017059065401554108, -0.12385934591293335, 0.034160226583480835, 0.08975636214017868, -0.05637010186910629, 0.04365461319684982, 0.14807137846946716, -0.024462589994072914, 0.15089869499206543, 0.03260298818349838, 
-0.007010284811258316, 0.07732367515563965, 0.07085476070642471, 0.0316074937582016, -0.08574341237545013, 0.07274546474218369, -0.0950482189655304, 0.12469052523374557, 0.11251777410507202, -0.010669533163309097, 0.002569634933024645, -0.05370477959513664, 0.055922240018844604, -0.051987387239933014, 0.12062734365463257, -0.018851039931178093, -0.14303019642829895, 0.04903849959373474, 0.001994506223127246, 0.0712060034275055, -0.24591362476348877, -0.0575726144015789, 0.10129646211862564, -0.05228974297642708, -0.006481752265244722, 0.0809093490242958, 0.034738607704639435, 0.028031447902321815, -0.055425696074962616, -0.1265207976102829, 0.021950343623757362, 0.11344950646162033, -0.06468857079744339, -0.03789252042770386 ]
f671e98a08fea00250a840d5305aacebd941d61c
# Dataset Card for Evaluation run of Aryanne/sheared-plus-westlake-nearest-50_75p <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Aryanne/sheared-plus-westlake-nearest-50_75p](https://huggingface.co/Aryanne/sheared-plus-westlake-nearest-50_75p) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Aryanne__sheared-plus-westlake-nearest-50_75p", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T04:58:33.658869](https://huggingface.co/datasets/open-llm-leaderboard/details_Aryanne__sheared-plus-westlake-nearest-50_75p/blob/main/results_2024-01-25T04-58-33.658869.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.24806262363403858, "acc_stderr": 0.030377724749675428, "acc_norm": 0.2496810641527624, "acc_norm_stderr": 0.0311760528500176, "mc1": 0.2631578947368421, "mc1_stderr": 0.015415241740237012, "mc2": 0.4238910085628728, "mc2_stderr": 0.014960449575853437 }, "harness|arc:challenge|25": { "acc": 0.3310580204778157, "acc_stderr": 0.013752062419817832, "acc_norm": 0.36177474402730375, "acc_norm_stderr": 0.014041957945038083 }, "harness|hellaswag|10": { "acc": 0.43925512846046605, "acc_stderr": 0.0049528205388318855, "acc_norm": 0.5753833897629954, "acc_norm_stderr": 0.004932745013072706 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2518518518518518, "acc_stderr": 0.037498507091740206, "acc_norm": 0.2518518518518518, "acc_norm_stderr": 0.037498507091740206 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.19736842105263158, "acc_stderr": 0.03238981601699397, "acc_norm": 0.19736842105263158, "acc_norm_stderr": 0.03238981601699397 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.02528839450289137, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.02528839450289137 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.23, 
"acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.2, "acc_stderr": 0.04020151261036846, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749874, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749874 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.225531914893617, "acc_stderr": 0.027321078417387533, "acc_norm": 0.225531914893617, "acc_norm_stderr": 0.027321078417387533 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.20899470899470898, "acc_stderr": 0.02094048156533486, "acc_norm": 0.20899470899470898, "acc_norm_stderr": 0.02094048156533486 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.0404061017820884, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.0404061017820884 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.20967741935483872, "acc_stderr": 0.02315787934908353, "acc_norm": 0.20967741935483872, "acc_norm_stderr": 0.02315787934908353 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.1625615763546798, "acc_stderr": 0.025960300064605594, "acc_norm": 0.1625615763546798, "acc_norm_stderr": 0.025960300064605594 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2787878787878788, "acc_stderr": 0.03501438706296781, "acc_norm": 0.2787878787878788, "acc_norm_stderr": 0.03501438706296781 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.17676767676767677, "acc_stderr": 0.027178752639044915, "acc_norm": 0.17676767676767677, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.24352331606217617, "acc_stderr": 0.030975436386845447, "acc_norm": 0.24352331606217617, "acc_norm_stderr": 0.030975436386845447 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.24871794871794872, "acc_stderr": 0.0219169577092138, "acc_norm": 0.24871794871794872, "acc_norm_stderr": 0.0219169577092138 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.22592592592592592, "acc_stderr": 0.02549753263960955, "acc_norm": 0.22592592592592592, "acc_norm_stderr": 0.02549753263960955 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.026265024608275882, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.026265024608275882 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2119205298013245, "acc_stderr": 0.03336767086567976, "acc_norm": 0.2119205298013245, "acc_norm_stderr": 0.03336767086567976 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1926605504587156, "acc_stderr": 0.016909276884936094, "acc_norm": 0.1926605504587156, "acc_norm_stderr": 0.016909276884936094 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.2037037037037037, "acc_stderr": 
0.027467401804058, "acc_norm": 0.2037037037037037, "acc_norm_stderr": 0.027467401804058 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.23529411764705882, "acc_stderr": 0.029771775228145638, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.029771775228145638 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.26582278481012656, "acc_stderr": 0.02875679962965834, "acc_norm": 0.26582278481012656, "acc_norm_stderr": 0.02875679962965834 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.31390134529147984, "acc_stderr": 0.031146796482972465, "acc_norm": 0.31390134529147984, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25, "acc_stderr": 0.04186091791394607, "acc_norm": 0.25, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.32142857142857145, "acc_stderr": 0.04432804055291519, "acc_norm": 0.32142857142857145, "acc_norm_stderr": 0.04432804055291519 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2905982905982906, "acc_stderr": 0.029745048572674043, "acc_norm": 0.2905982905982906, "acc_norm_stderr": 0.029745048572674043 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, 
"acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.23754789272030652, "acc_stderr": 0.015218733046150193, "acc_norm": 0.23754789272030652, "acc_norm_stderr": 0.015218733046150193 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2774566473988439, "acc_stderr": 0.024105712607754307, "acc_norm": 0.2774566473988439, "acc_norm_stderr": 0.024105712607754307 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.023929155517351284, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.023929155517351284 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.3215434083601286, "acc_stderr": 0.026527724079528872, "acc_norm": 0.3215434083601286, "acc_norm_stderr": 0.026527724079528872 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2191358024691358, "acc_stderr": 0.023016705640262203, "acc_norm": 0.2191358024691358, "acc_norm_stderr": 0.023016705640262203 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432417, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432417 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.25097783572359844, "acc_stderr": 0.01107373029918724, "acc_norm": 0.25097783572359844, "acc_norm_stderr": 0.01107373029918724 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.34558823529411764, "acc_stderr": 0.02888819310398864, "acc_norm": 0.34558823529411764, "acc_norm_stderr": 0.02888819310398864 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.2647058823529412, "acc_stderr": 0.017848089574913226, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.017848089574913226 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.20909090909090908, "acc_stderr": 
0.03895091015724136, "acc_norm": 0.20909090909090908, "acc_norm_stderr": 0.03895091015724136 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.17142857142857143, "acc_stderr": 0.024127463462650146, "acc_norm": 0.17142857142857143, "acc_norm_stderr": 0.024127463462650146 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24875621890547264, "acc_stderr": 0.030567675938916707, "acc_norm": 0.24875621890547264, "acc_norm_stderr": 0.030567675938916707 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 0.2631578947368421, "mc1_stderr": 0.015415241740237012, "mc2": 0.4238910085628728, "mc2_stderr": 0.014960449575853437 }, "harness|winogrande|5": { "acc": 0.5674822415153907, "acc_stderr": 0.01392391157862384 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_Aryanne__sheared-plus-westlake-nearest-50_75p
[ "region:us" ]
2024-01-25T05:00:55+00:00
{"pretty_name": "Evaluation run of Aryanne/sheared-plus-westlake-nearest-50_75p", "dataset_summary": "Dataset automatically created during the evaluation run of model [Aryanne/sheared-plus-westlake-nearest-50_75p](https://huggingface.co/Aryanne/sheared-plus-westlake-nearest-50_75p) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Aryanne__sheared-plus-westlake-nearest-50_75p\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T04:58:33.658869](https://huggingface.co/datasets/open-llm-leaderboard/details_Aryanne__sheared-plus-westlake-nearest-50_75p/blob/main/results_2024-01-25T04-58-33.658869.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.24806262363403858,\n \"acc_stderr\": 0.030377724749675428,\n \"acc_norm\": 0.2496810641527624,\n \"acc_norm_stderr\": 0.0311760528500176,\n \"mc1\": 0.2631578947368421,\n \"mc1_stderr\": 0.015415241740237012,\n \"mc2\": 0.4238910085628728,\n \"mc2_stderr\": 0.014960449575853437\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.3310580204778157,\n \"acc_stderr\": 0.013752062419817832,\n \"acc_norm\": 0.36177474402730375,\n \"acc_norm_stderr\": 0.014041957945038083\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.43925512846046605,\n \"acc_stderr\": 0.0049528205388318855,\n \"acc_norm\": 0.5753833897629954,\n \"acc_norm_stderr\": 0.004932745013072706\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2518518518518518,\n \"acc_stderr\": 0.037498507091740206,\n \"acc_norm\": 0.2518518518518518,\n \"acc_norm_stderr\": 0.037498507091740206\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.19736842105263158,\n \"acc_stderr\": 0.03238981601699397,\n \"acc_norm\": 0.19736842105263158,\n \"acc_norm_stderr\": 0.03238981601699397\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.02528839450289137,\n \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.02528839450289137\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 
0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.225531914893617,\n \"acc_stderr\": 0.027321078417387533,\n \"acc_norm\": 0.225531914893617,\n \"acc_norm_stderr\": 0.027321078417387533\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.20899470899470898,\n \"acc_stderr\": 0.02094048156533486,\n \"acc_norm\": 0.20899470899470898,\n \"acc_norm_stderr\": 0.02094048156533486\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 
0.0404061017820884,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.0404061017820884\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.20967741935483872,\n \"acc_stderr\": 0.02315787934908353,\n \"acc_norm\": 0.20967741935483872,\n \"acc_norm_stderr\": 0.02315787934908353\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.1625615763546798,\n \"acc_stderr\": 0.025960300064605594,\n \"acc_norm\": 0.1625615763546798,\n \"acc_norm_stderr\": 0.025960300064605594\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.04461960433384741,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.04461960433384741\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.2787878787878788,\n \"acc_stderr\": 0.03501438706296781,\n \"acc_norm\": 0.2787878787878788,\n \"acc_norm_stderr\": 0.03501438706296781\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 0.027178752639044915\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.24352331606217617,\n \"acc_stderr\": 0.030975436386845447,\n \"acc_norm\": 0.24352331606217617,\n \"acc_norm_stderr\": 0.030975436386845447\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.24871794871794872,\n \"acc_stderr\": 0.0219169577092138,\n \"acc_norm\": 0.24871794871794872,\n \"acc_norm_stderr\": 0.0219169577092138\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.22592592592592592,\n \"acc_stderr\": 0.02549753263960955,\n \"acc_norm\": 0.22592592592592592,\n \"acc_norm_stderr\": 0.02549753263960955\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.20588235294117646,\n \"acc_stderr\": 0.026265024608275882,\n \"acc_norm\": 0.20588235294117646,\n \"acc_norm_stderr\": 0.026265024608275882\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2119205298013245,\n \"acc_stderr\": 0.03336767086567976,\n \"acc_norm\": 0.2119205298013245,\n \"acc_norm_stderr\": 0.03336767086567976\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.1926605504587156,\n \"acc_stderr\": 0.016909276884936094,\n \"acc_norm\": 0.1926605504587156,\n \"acc_norm_stderr\": 0.016909276884936094\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.2037037037037037,\n \"acc_stderr\": 0.027467401804058,\n \"acc_norm\": 0.2037037037037037,\n \"acc_norm_stderr\": 0.027467401804058\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.029771775228145638,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.029771775228145638\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.26582278481012656,\n \"acc_stderr\": 0.02875679962965834,\n \"acc_norm\": 0.26582278481012656,\n \"acc_norm_stderr\": 0.02875679962965834\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.31390134529147984,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.31390134529147984,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 
0.25,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.32142857142857145,\n \"acc_stderr\": 0.04432804055291519,\n \"acc_norm\": 0.32142857142857145,\n \"acc_norm_stderr\": 0.04432804055291519\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2905982905982906,\n \"acc_stderr\": 0.029745048572674043,\n \"acc_norm\": 0.2905982905982906,\n \"acc_norm_stderr\": 0.029745048572674043\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.23754789272030652,\n \"acc_stderr\": 0.015218733046150193,\n \"acc_norm\": 0.23754789272030652,\n \"acc_norm_stderr\": 0.015218733046150193\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.2774566473988439,\n \"acc_stderr\": 0.024105712607754307,\n \"acc_norm\": 0.2774566473988439,\n \"acc_norm_stderr\": 0.024105712607754307\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.3215434083601286,\n \"acc_stderr\": 0.026527724079528872,\n \"acc_norm\": 
0.3215434083601286,\n \"acc_norm_stderr\": 0.026527724079528872\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.2191358024691358,\n \"acc_stderr\": 0.023016705640262203,\n \"acc_norm\": 0.2191358024691358,\n \"acc_norm_stderr\": 0.023016705640262203\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432417,\n \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432417\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.25097783572359844,\n \"acc_stderr\": 0.01107373029918724,\n \"acc_norm\": 0.25097783572359844,\n \"acc_norm_stderr\": 0.01107373029918724\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.34558823529411764,\n \"acc_stderr\": 0.02888819310398864,\n \"acc_norm\": 0.34558823529411764,\n \"acc_norm_stderr\": 0.02888819310398864\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.2647058823529412,\n \"acc_stderr\": 0.017848089574913226,\n \"acc_norm\": 0.2647058823529412,\n \"acc_norm_stderr\": 0.017848089574913226\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.20909090909090908,\n \"acc_stderr\": 0.03895091015724136,\n \"acc_norm\": 0.20909090909090908,\n \"acc_norm_stderr\": 0.03895091015724136\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.17142857142857143,\n \"acc_stderr\": 0.024127463462650146,\n \"acc_norm\": 0.17142857142857143,\n \"acc_norm_stderr\": 0.024127463462650146\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24875621890547264,\n \"acc_stderr\": 0.030567675938916707,\n \"acc_norm\": 0.24875621890547264,\n \"acc_norm_stderr\": 0.030567675938916707\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 
0.03507295431370518,\n \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370518\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n \"acc_stderr\": 0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2631578947368421,\n \"mc1_stderr\": 0.015415241740237012,\n \"mc2\": 0.4238910085628728,\n \"mc2_stderr\": 0.014960449575853437\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5674822415153907,\n \"acc_stderr\": 0.01392391157862384\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/Aryanne/sheared-plus-westlake-nearest-50_75p", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|arc:challenge|25_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|gsm8k|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hellaswag|10_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T04-58-33.658869.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T04-58-33.658869.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T04-58-33.658869.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T04-58-33.658869.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T04-58-33.658869.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T04-58-33.658869.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T04-58-33.658869.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": 
"2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": 
"2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T04-58-33.658869.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["**/details_harness|winogrande|5_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T04-58-33.658869.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T04_58_33.658869", "path": ["results_2024-01-25T04-58-33.658869.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T04-58-33.658869.parquet"]}]}]}
2024-01-25T05:01:19+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Aryanne/sheared-plus-westlake-nearest-50_75p Dataset automatically created during the evaluation run of model Aryanne/sheared-plus-westlake-nearest-50_75p on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T04:58:33.658869(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Aryanne/sheared-plus-westlake-nearest-50_75p\n\n\n\nDataset automatically created during the evaluation run of model Aryanne/sheared-plus-westlake-nearest-50_75p on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T04:58:33.658869(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Aryanne/sheared-plus-westlake-nearest-50_75p\n\n\n\nDataset automatically created during the evaluation run of model Aryanne/sheared-plus-westlake-nearest-50_75p on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T04:58:33.658869(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 201, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Aryanne/sheared-plus-westlake-nearest-50_75p\n\n\n\nDataset automatically created during the evaluation run of model Aryanne/sheared-plus-westlake-nearest-50_75p on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T04:58:33.658869(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
[ -0.08193421363830566, 0.22370152175426483, -0.005799685139209032, 0.0496700257062912, 0.09047389775514603, -0.033046118915081024, 0.009794887155294418, 0.11897403746843338, -0.005072368308901787, 0.16110265254974365, -0.02385074272751808, 0.09265277534723282, 0.08601987361907959, 0.17013534903526306, 0.017138991504907608, -0.12180165946483612, 0.017882075160741806, -0.07347430288791656, 0.08031906932592392, 0.07459796220064163, 0.08443053066730499, -0.09219348430633545, 0.06460821628570557, -0.01197133306413889, -0.018301280215382576, 0.004580140579491854, -0.07924597710371017, -0.029240574687719345, 0.08286986500024796, 0.06983990967273712, 0.023334281519055367, -0.02780872955918312, 0.0033139055594801903, -0.23986905813217163, 0.018646998330950737, 0.06847809255123138, 0.013005419634282589, 0.0633331686258316, 0.14350570738315582, -0.05879203602671623, 0.047533899545669556, -0.06457286328077316, 0.05432319641113281, 0.05076047033071518, -0.11605740338563919, -0.09134943783283234, -0.13793933391571045, 0.007513021118938923, 0.08939176797866821, 0.054652873426675797, -0.03517706319689751, 0.11429454386234283, -0.0072707487270236015, 0.03170446306467056, 0.11850334703922272, -0.12753209471702576, -0.025016365572810173, -0.021901212632656097, 0.02317201718688011, 0.020994726568460464, -0.11768857389688492, -0.024007609114050865, 0.021940935403108597, 0.03928760811686516, 0.03401578217744827, 0.009678442031145096, -0.04652278870344162, 0.0038665644824504852, -0.1161518469452858, -0.07479902356863022, 0.14159712195396423, 0.01528630405664444, -0.05131566897034645, -0.15311552584171295, -0.0315127931535244, -0.017797578126192093, -0.014274309389293194, -0.021720152348279953, 0.020072825253009796, -0.01349941547960043, 0.07155092805624008, -0.016914643347263336, -0.09423471987247467, -0.0028908406384289265, -0.026983769610524178, 0.014528270810842514, 0.013714764267206192, -0.0075450739823281765, 0.00016441495972685516, 0.1069650873541832, -0.020169660449028015, 
-0.10017021000385284, -0.07638821750879288, -0.04014112427830696, -0.11289507895708084, -0.04621779918670654, 0.019091561436653137, -0.04409331828355789, 0.03578018397092819, 0.23355425894260406, -0.06123900040984154, 0.03002302534878254, -0.08295059204101562, -0.010638316161930561, 0.10718359798192978, 0.05941975861787796, -0.041650690138339996, -0.07988008856773376, -0.014571581967175007, 0.03103359043598175, 0.01868734136223793, -0.007580438628792763, 0.02357151359319687, 0.0695161297917366, 0.03750478848814964, 0.11255214363336563, 0.11756859719753265, 0.007953470572829247, -0.07326701283454895, -0.02165958844125271, 0.21953219175338745, -0.17495565116405487, 0.013371874578297138, 0.006356157828122377, -0.036757875233888626, -0.09492789953947067, 0.04438762366771698, -0.0049287970177829266, -0.06844305247068405, 0.09350622445344925, -0.07184985280036926, -0.04748718440532684, -0.08381354063749313, -0.040164392441511154, 0.04896799102425575, -0.03223288431763649, -0.036874618381261826, -0.06000518053770065, -0.13428114354610443, -0.07988282293081284, 0.015406375750899315, -0.09072327613830566, -0.010160045698285103, 0.019755663350224495, -0.023797757923603058, -0.008227134123444557, -0.01529413741081953, 0.12892349064350128, -0.06689410656690598, 0.007006493862718344, 0.003373187966644764, 0.002595706144347787, 0.10316887497901917, 0.04517480731010437, -0.12429796159267426, 0.08246061205863953, -0.04561489075422287, 0.1234959289431572, -0.08405318111181259, 0.011527449823915958, -0.12585583329200745, 0.008367612026631832, -0.042852263897657394, 0.01590830832719803, 0.032340891659259796, 0.10749384015798569, -0.25895801186561584, 0.014186765067279339, 0.11034121364355087, -0.10638639330863953, -0.09880859404802322, 0.04759946092963219, -0.04392580687999725, 0.06167807802557945, 0.049709297716617584, 0.0927715003490448, 0.12491077929735184, -0.06753173470497131, -0.13614369928836823, -0.09300247579813004, -0.02253306843340397, 0.09936919808387756, 
0.04968928545713425, -0.07224830985069275, 0.16173197329044342, 0.041208114475011826, -0.0029110878240317106, -0.06867269426584244, -0.01848091185092926, -0.06028443202376366, -0.01702277921140194, -0.054965902119874954, -0.09599968045949936, -0.00345240393653512, -0.06570995599031448, -0.0415399968624115, -0.08308474719524384, 0.041884902864694595, 0.06958628445863724, 0.01099805161356926, 0.02079603262245655, -0.08027958869934082, 0.060295186936855316, 0.004667297005653381, 0.015475461259484291, -0.22500325739383698, -0.07524457573890686, 0.036369845271110535, -0.12989197671413422, 0.03824297711253166, -0.013903249986469746, 0.01911689154803753, 0.03464560583233833, -0.0032727145589888096, 0.005396203603595495, 0.018714115023612976, -0.0052857049740850925, -0.013454772531986237, -0.12850770354270935, -0.035400133579969406, -0.07601653784513474, 0.029904935508966446, -0.12407798320055008, -0.024814706295728683, 0.08020089566707611, 0.1683701127767563, 0.01209644228219986, -0.09336844086647034, 0.07028356194496155, -0.007946867495775223, -0.04187784716486931, -0.06699440628290176, 0.0038473783060908318, -0.009183168411254883, 0.06349365413188934, 0.04982859641313553, -0.19382938742637634, -0.14111162722110748, 0.07270850241184235, 0.10587763786315918, -0.0646958202123642, -0.03900819271802902, -0.0830632820725441, -0.05591418966650963, -0.08256179839372635, -0.06617829948663712, 0.07329989969730377, 0.07155182957649231, 0.04110374674201012, -0.061295170336961746, -0.0889921709895134, -0.004628896247595549, 0.056096505373716354, -0.06669248640537262, 0.09465906023979187, 0.050230979919433594, -0.08522902429103851, 0.0907006487250328, 0.032855842262506485, 0.1296808421611786, 0.09620580822229385, -0.004754434339702129, -0.11490990221500397, -0.00900796614587307, 0.05307956039905548, 0.034887101501226425, 0.07281695306301117, -0.01714126206934452, 0.029376087710261345, 0.08669127523899078, 0.001057283254340291, 0.048835303634405136, -0.057244185358285904, 
0.041856516152620316, 0.026091700419783592, 0.001776331220753491, 0.032545190304517746, 0.01622072607278824, 0.00860499031841755, 0.06360029429197311, 0.02470513992011547, 0.10843265056610107, -0.021939117461442947, -0.04613834246993065, -0.08219273388385773, 0.1366610825061798, -0.0961068794131279, -0.2402268946170807, -0.15704435110092163, -0.04944836348295212, -0.017121193930506706, -0.0023995027877390385, 0.05206011235713959, -0.014768396504223347, -0.1103711724281311, -0.10895499587059021, 0.023140545934438705, 0.03571775183081627, -0.10940057784318924, -0.014847819693386555, 0.037783268839120865, 0.03897561877965927, -0.1532830446958542, 0.029688021168112755, 0.03566828742623329, -0.03140277788043022, -0.008732501417398453, 0.11334535479545593, 0.12237362563610077, 0.06190487742424011, 0.04170410335063934, -0.024043135344982147, 0.0026940591633319855, 0.19834302365779877, -0.08911170065402985, 0.03955492749810219, 0.11725413799285889, -0.0491374135017395, 0.07295972853899002, 0.15075340867042542, 0.006303628906607628, -0.09468428790569305, 0.03185147047042847, 0.10045509040355682, -0.05546686798334122, -0.26274141669273376, -0.06053711473941803, -0.03933822363615036, -0.0120134511962533, 0.08363349735736847, 0.07562433183193207, -0.018236510455608368, 0.01734885387122631, -0.09850168973207474, -0.05227963253855705, -0.051987189799547195, 0.0650169774889946, 0.08368047326803207, 0.022370561957359314, 0.05780116468667984, -0.04284512624144554, 0.0512542799115181, 0.11451990902423859, 0.018055783584713936, 0.18327496945858002, -0.03205028176307678, 0.1842377483844757, 0.09223049134016037, 0.0734596997499466, -0.039451539516448975, 0.06745864450931549, 0.010230271145701408, 0.05734378471970558, 0.010167282074689865, -0.09731600433588028, -0.026875650510191917, 0.07295586913824081, -0.023249775171279907, -0.032048147171735764, 0.04835136607289314, -0.005536789074540138, 0.061176128685474396, 0.15048152208328247, 0.004574714694172144, -0.13867983222007751, 
-0.06400614231824875, 0.050574131309986115, -0.04361142963171005, -0.10262180119752884, -0.02257833257317543, 0.056684285402297974, -0.11769168078899384, 0.015857618302106857, -0.029581425711512566, 0.09656261652708054, -0.11675609648227692, -0.025512244552373886, -0.002724501071497798, 0.09630224108695984, -0.010175412520766258, 0.09713363647460938, -0.12288886308670044, 0.09971432387828827, -0.0020481932442635298, 0.06662493944168091, -0.05844511091709137, 0.0702696293592453, -0.016897069290280342, -0.07761654257774353, 0.1379057765007019, 0.00019750796491280198, -0.11478094011545181, -0.027588307857513428, -0.1245964989066124, -0.012973999604582787, 0.05296139046549797, -0.11559385061264038, 0.10050223022699356, 0.015207230113446712, -0.01741110533475876, -0.03628063201904297, -0.026359686627984047, -0.12113800644874573, -0.2206258326768875, 0.10383622348308563, -0.11646721512079239, 0.046007029712200165, -0.04005841538310051, -0.049071408808231354, -0.05554724484682083, 0.17693328857421875, -0.09414295107126236, -0.07712312787771225, -0.1344251185655594, 0.02663307823240757, 0.18880069255828857, -0.06224803999066353, 0.07882098853588104, -0.04252339154481888, 0.18123319745063782, 0.004468668717890978, -0.05062314495444298, 0.0015426877653226256, -0.08760123699903488, -0.1460966318845749, -0.03988386318087578, 0.1517103910446167, 0.05275158956646919, -0.004611447919160128, 0.0021496813278645277, 0.059796642512083054, -0.005959751084446907, -0.0825570747256279, 0.05034917965531349, 0.05283508449792862, 0.12642979621887207, 0.005002958700060844, -0.03941921144723892, -0.1088624820113182, -0.10515650361776352, -0.09638901799917221, 0.048602525144815445, 0.15302985906600952, -0.053169894963502884, 0.13164910674095154, 0.10834807902574539, -0.10231064260005951, -0.16955526173114777, -0.04937221482396126, 0.029595624655485153, -0.01612902246415615, 0.11488070338964462, -0.17771942913532257, 0.07096090167760849, 0.06259095668792725, -0.016870494931936264, 
0.1339203417301178, -0.18445532023906708, -0.15003769099712372, 0.033789247274398804, 0.026721632108092308, -0.18931671977043152, -0.14438354969024658, -0.11755820363759995, -0.007248046807944775, -0.17513950169086456, 0.1315261721611023, 0.01730341650545597, 0.02514583431184292, -0.015288702212274075, 0.04978905990719795, 0.04100840166211128, -0.0572214350104332, 0.12297502905130386, 0.022518416866660118, 0.03167617321014404, -0.09245599061250687, -0.012192063964903355, 0.022200608626008034, -0.052051108330488205, 0.07437185198068619, 0.029223227873444557, 0.05761898308992386, -0.09960588067770004, -0.03381681069731712, -0.040078114718198776, 0.044194627553224564, -0.06252820044755936, -0.04931115731596947, -0.04900185018777847, 0.07497337460517883, 0.07559928297996521, -0.021142970770597458, 0.02028089389204979, -0.031320784240961075, 0.01701359637081623, 0.22340664267539978, 0.091769739985466, 0.029928844422101974, -0.10259335488080978, -0.026248939335346222, -0.00675384234637022, -0.01311126071959734, -0.12532150745391846, 0.050134968012571335, 0.09829528629779816, 0.03404473140835762, 0.07806196808815002, -0.025114404037594795, -0.16717886924743652, 0.0010801299940794706, 0.09681215137243271, -0.10196849703788757, -0.1912408322095871, 0.027020147070288658, 0.09174976497888565, -0.13299226760864258, -0.0600544810295105, 0.0832841545343399, 0.03678445518016815, -0.023178894072771072, 0.014873949810862541, 0.07035482674837112, 0.057864390313625336, 0.10745707899332047, -0.012739032506942749, 0.048753246665000916, -0.0814589262008667, 0.11444947868585587, 0.12116599828004837, -0.07823366671800613, 0.02125001884996891, 0.07481836527585983, -0.06382935494184494, -0.05167181417346001, 0.015313445590436459, 0.045811451971530914, 0.00178294419310987, -0.03080838918685913, 0.011137564666569233, -0.027511104941368103, 0.07241055369377136, 0.10414792597293854, -0.007038856856524944, 0.03688434138894081, 0.020627779886126518, -0.008038139902055264, -0.05658728629350662, 
0.12521706521511078, 0.0711008682847023, 0.03464424982666969, -0.03534428030252457, 0.0035699140280485153, 0.008707982487976551, -0.02333942987024784, 0.019271288067102432, -0.03479261323809624, -0.016974493861198425, -0.017785485833883286, -0.17353995144367218, 0.02453264780342579, -0.09033393859863281, -0.02227609045803547, -0.015354671515524387, -0.02974977158010006, -0.033223919570446014, 0.030079808086156845, -0.0506664402782917, -0.07151439040899277, -0.04640871658921242, 0.09382416307926178, -0.20177249610424042, 0.009511783719062805, 0.08706603199243546, -0.07524057477712631, 0.07593678683042526, 0.02195105515420437, -0.01357536856085062, 0.009724140167236328, -0.07720209658145905, -0.023904742673039436, -0.02805112674832344, 0.03729257360100746, 0.032614246010780334, -0.1504424810409546, -0.010791556909680367, 0.021495096385478973, -0.07175957411527634, -0.03220077604055405, 0.030044637620449066, -0.14611579477787018, 0.023872967809438705, 0.06291652470827103, -0.031059660017490387, -0.03930523991584778, 0.04003428667783737, 0.07527417689561844, 0.014157865196466446, 0.1171068474650383, -0.0036843298003077507, 0.029352091252803802, -0.16899894177913666, -0.04008015990257263, 0.011378628201782703, 0.0010855495929718018, 0.008513657376170158, 0.0058195036835968494, 0.04143859073519707, -0.020243331789970398, 0.19689777493476868, -0.031646907329559326, 0.06125708296895027, 0.04503310099244118, 0.007043088786303997, -0.060724589973688126, 0.03339516744017601, 0.0683118999004364, 0.0005852561444044113, 0.0073343487456440926, 0.01585199497640133, -0.04061690717935562, -0.03339811787009239, -0.05884328857064247, 0.0807986706495285, 0.1497420072555542, 0.1337941735982895, -0.021922040730714798, 0.06294244527816772, -0.14687812328338623, -0.08486752212047577, 0.026609761640429497, -0.07127068191766739, 0.02610100619494915, -0.07009457796812057, 0.05577003210783005, 0.09642402827739716, -0.132171168923378, 0.13428235054016113, -0.0550677590072155, 
-0.04473884403705597, -0.02537570707499981, -0.16191335022449493, -0.040809113532304764, 0.015901727601885796, 0.0157181229442358, -0.09618905186653137, 0.11506382375955582, 0.10610710829496384, -0.0007952752057462931, -0.020950254052877426, 0.11780410259962082, -0.06910094618797302, -0.06316297501325607, -0.015296832658350468, 0.013634971342980862, 0.02962332032620907, 0.014104324392974377, 0.0763464942574501, 0.012411943636834621, 0.07120148837566376, 0.06892311573028564, 0.0830952376127243, 0.07048457860946655, 0.025031762197613716, -0.03502514958381653, -0.06372373551130295, 0.0004994479822926223, -0.011387145146727562, -0.05743074044585228, 0.17389072477817535, 0.04813652113080025, 0.030448561534285545, 0.010258514434099197, 0.20340140163898468, 0.005990992765873671, -0.055808283388614655, -0.13639242947101593, 0.05371838063001633, 0.021457968279719353, 0.016238996759057045, 0.03460698947310448, -0.14178496599197388, 0.035572782158851624, 0.17716681957244873, 0.08253943175077438, 0.025170965120196342, -0.0003233338356949389, 0.032698679715394974, 0.023850534111261368, -0.03372330591082573, 0.023250587284564972, 0.04002072662115097, 0.14530284702777863, -0.038354091346263885, 0.036382611840963364, -0.020757390186190605, -0.03774410858750343, -0.02053338661789894, 0.10821796208620071, -0.050569962710142136, 0.027709895744919777, -0.06570466607809067, 0.07726780325174332, -0.035577159374952316, -0.28912076354026794, -0.012510719709098339, -0.08552732318639755, -0.13198527693748474, -0.018760204315185547, 0.036088719964027405, -0.019188256934285164, 0.017650056630373, 0.05399305373430252, -0.016830919310450554, 0.18044434487819672, 0.013169072568416595, -0.05445893481373787, -0.0568169429898262, 0.06748459488153458, -0.0646534264087677, 0.22579671442508698, 0.0038068261928856373, 0.026869667693972588, 0.09592638164758682, -0.0035742581821978092, -0.17515024542808533, 0.02587289921939373, 0.0863872841000557, -0.03375555947422981, 0.05670618265867233, 
0.17159639298915863, -0.0072355675511062145, 0.0662740021944046, 0.04923167824745178, 0.023116247728466988, 0.0376594103872776, 0.030969606712460518, 0.024444391950964928, -0.0826273113489151, 0.06822939217090607, -0.07629280537366867, 0.14030495285987854, 0.11225968599319458, -0.024648159742355347, 0.01091737300157547, -0.06320944428443909, 0.0629892498254776, -0.03190358728170395, 0.1018715500831604, -0.0038069430738687515, -0.17396622896194458, 0.06290234625339508, 0.08896201848983765, 0.06899767369031906, -0.196071594953537, -0.0688786581158638, 0.10502711683511734, -0.053288813680410385, -0.013245980255305767, 0.12153033912181854, 0.043450962752103806, 0.027088293805718422, -0.058941613882780075, -0.05579925328493118, 0.011367958970367908, 0.10802160948514938, -0.07253406196832657, -0.023590464144945145 ]
6f2bcf101e14ff4ae853fa9e38a10427a90af9e4
SafeCodeDPO is an experimental synthetic code dataset for DPO fine-tuning. * 3 iterations of deepseek-coder-33B-instruct with unique instructions are used to generate the dataset: 1) (temp=0.5) a problem for a python coding competition 2) (temp=0.4) slow, intentionally vulnerable problem solution (rejected) 3) (temp=0.3) optimized, secure code solution (chosen)
CyberNative/SafeCodeDPO
[ "license:mit", "region:us" ]
2024-01-25T05:10:36+00:00
{"license": "mit"}
2024-01-25T05:21:16+00:00
[]
[]
TAGS #license-mit #region-us
SafeCodeDPO is an experimental synthetic code dataset for DPO fine-tuning. * 3 iterations of deepseek-coder-33B-instruct with unique instructions are used to generate the dataset: 1) (temp=0.5) a problem for a python coding competition 2) (temp=0.4) slow, intentionally vulnerable problem solution (rejected) 3) (temp=0.3) optimized, secure code solution (chosen)
[]
[ "TAGS\n#license-mit #region-us \n" ]
[ 11 ]
[ "passage: TAGS\n#license-mit #region-us \n" ]
[ 0.026221778243780136, -0.033018264919519424, -0.008281232789158821, -0.05295303836464882, 0.052470896393060684, 0.06768012046813965, 0.1598525494337082, 0.04655371606349945, 0.23683255910873413, -0.05407243221998215, 0.11752297729253769, 0.08923697471618652, 0.004284696187824011, -0.0009730930323712528, 0.014216204173862934, -0.17134642601013184, 0.04864625632762909, -0.02878100797533989, 0.08764812350273132, 0.032233644276857376, -0.006205103360116482, -0.03845774009823799, -0.0022142508532851934, -0.03178790956735611, -0.057939812541007996, 0.03869890421628952, 0.045729056000709534, -0.02754949778318405, 0.14189864695072174, -0.021783310920000076, 0.13335508108139038, 0.046146418899297714, -0.011738095432519913, -0.2486042082309723, 0.008575023151934147, -0.07252951711416245, -0.11333522200584412, 0.016201216727495193, 0.035761721432209015, -0.010069100186228752, 0.032174937427043915, 0.11049123108386993, -0.011680051684379578, 0.06288356333971024, -0.2015703022480011, -0.20486389100551605, -0.07508610188961029, -0.07555478066205978, 0.0589042492210865, 0.030872387811541557, 0.05628744140267372, 0.1426718831062317, -0.18022038042545319, -0.0018841808196157217, 0.04129622131586075, -0.3510737717151642, 0.09011197835206985, 0.19666501879692078, 0.06407395005226135, 0.07872317731380463, -0.04774639382958412, 0.06726468354463577, 0.07745297998189926, -0.02402484230697155, -0.10679105669260025, -0.06142130121588707, 0.040939174592494965, 0.15604156255722046, -0.03852643445134163, -0.10356393456459045, 0.2591084837913513, -0.023262828588485718, -0.04234466329216957, 0.08201269060373306, -0.02980397455394268, -0.040379155427217484, 0.04404358193278313, 0.044016025960445404, 0.036236923187971115, 0.182089164853096, 0.1260262131690979, -0.03375067934393883, -0.16269677877426147, -0.030629513785243034, -0.2528207004070282, 0.07418664544820786, -0.003647059667855501, 0.10666298121213913, -0.20037521421909332, 0.03286786004900932, -0.15483668446540833, 
-0.009493621066212654, -0.02952384203672409, -0.059835705906152725, 0.05229754373431206, -0.0237403754144907, -0.04600388556718826, 0.07238677144050598, 0.08390641957521439, 0.2046167105436325, 0.023024363443255424, 0.016697337850928307, -0.10405295342206955, 0.15052515268325806, 0.019140364602208138, 0.024860305711627007, 0.179348424077034, 0.07677878439426422, -0.04891882464289665, -0.2251969277858734, 0.027894439175724983, -0.03671982139348984, -0.1441805064678192, 0.015881337225437164, -0.1542915552854538, 0.1736440360546112, -0.04078168794512749, -0.06919530034065247, -0.08578147739171982, 0.09790384024381638, 0.07768166810274124, -0.021921472623944283, -0.023105677217245102, -0.01381723117083311, 0.03522264584898949, -0.048196230083703995, -0.11687057465314865, 0.018241960555315018, 0.11869648098945618, 0.12573401629924774, -0.1483907401561737, -0.008189842104911804, -0.017200417816638947, 0.019065292552113533, 0.09696817398071289, -0.112403005361557, 0.028845038264989853, -0.09672309458255768, -0.13033071160316467, 0.036653537303209305, 0.017736904323101044, -0.019008556380867958, 0.1340927630662918, 0.061849117279052734, 0.056560322642326355, -0.011025321669876575, -0.07250872999429703, -0.14035539329051971, -0.08679798245429993, 0.1058693379163742, -0.046787332743406296, 0.010320915840566158, -0.24556252360343933, -0.014234079979360104, -0.14995723962783813, 0.059662189334630966, -0.0037668521981686354, -0.08819212019443512, -0.07740068435668945, 0.21408265829086304, 0.0018596589798107743, 0.04301392287015915, -0.1078512966632843, 0.054903753101825714, -0.06764797121286392, 0.10065380483865738, -0.12895582616329193, -0.06441528350114822, 0.1613781899213791, -0.13135331869125366, -0.14002031087875366, 0.0033312994055449963, -0.009472889825701714, 0.12053907662630081, 0.0802001804113388, 0.44566696882247925, -0.058881040662527084, -0.16201181709766388, 0.1270403116941452, 0.17969723045825958, -0.13685379922389984, -0.25928929448127747, 0.12393020838499069, 
-0.1636963188648224, -0.16647985577583313, 0.0040023741312325, -0.006962866988033056, 0.08049977570772171, -0.03446655720472336, -0.056274134665727615, 0.042339932173490524, 0.024350708350539207, 0.029094615951180458, 0.01740112341940403, 0.07037191838026047, -0.1023021712899208, 0.08444856107234955, 0.058610700070858, -0.014111426658928394, 0.15077349543571472, 0.011494536884129047, -0.05393160134553909, 0.014761670492589474, 0.044013332575559616, -0.015627963468432426, -0.05899091437458992, -0.09661509096622467, 0.019826244562864304, -0.031149597838521004, 0.08229395002126694, 0.1699674129486084, 0.023824702948331833, -0.02797185815870762, 0.028922779485583305, 0.028606392443180084, 0.1009954959154129, 0.06960704177618027, 0.03099375218153, -0.04839283227920532, 0.04952205345034599, -0.0417071171104908, -0.11430390179157257, -0.004862460307776928, -0.011735930107533932, 0.11975742131471634, -0.08906009048223495, -0.01223952230066061, 0.05951591953635216, -0.04513183981180191, 0.0019881438929587603, 0.0428374819457531, 0.0035966038703918457, 0.1388600617647171, 0.004440935328602791, -0.04352007433772087, 0.17440910637378693, -0.05288633331656456, 0.15533447265625, 0.1715822070837021, -0.07049662619829178, 0.015605369582772255, -0.1273636519908905, 0.003230511210858822, -0.014480113983154297, 0.05292887985706329, -0.05400136485695839, -0.05201306566596031, -0.01274962443858385, 0.014292534440755844, -0.03134604170918465, 0.01711403578519821, -0.06057267636060715, -0.08167021721601486, -0.10849859565496445, 0.018649224191904068, 0.20683221518993378, -0.22544461488723755, 0.1609548032283783, 0.40251004695892334, 0.15190774202346802, 0.21155193448066711, -0.12478897720575333, -0.002471078187227249, -0.06630261242389679, 0.026115071028470993, -0.024814706295728683, 0.13782677054405212, -0.13174867630004883, -0.01413064356893301, 0.03880728408694267, 0.0454997681081295, 0.0661163181066513, -0.17195898294448853, -0.15260353684425354, -0.0034879595041275024, 
-0.020591814070940018, -0.1749730259180069, 0.04874620959162712, -0.07595308125019073, 0.02181261032819748, 0.018216799944639206, -0.10832522064447403, 0.16837291419506073, -0.033566512167453766, -0.06695768237113953, 0.052613962441682816, -0.20581911504268646, -0.07900715619325638, -0.17772749066352844, -0.18375012278556824, 0.06050071492791176, 0.05760138854384422, 0.07903145253658295, -0.05951719731092453, -0.01922747679054737, 0.061719246208667755, -0.009363299235701561, -0.13802112638950348, -0.04235544428229332, -0.06993678212165833, 0.08744155615568161, -0.09474305808544159, -0.07518411427736282, -0.07833878695964813, -0.046996138989925385, -0.020961694419384003, 0.08125963062047958, -0.1039251759648323, 0.08903530240058899, 0.1493726521730423, 0.03651920333504677, 0.05440247058868408, -0.08271230012178421, 0.12693379819393158, -0.037743739783763885, -0.09459595382213593, 0.07307634502649307, 0.004350725095719099, 0.04920351505279541, 0.24039287865161896, 0.08962162584066391, -0.10578162968158722, -0.01780811697244644, -0.0968487411737442, -0.16405464708805084, -0.2553846538066864, -0.06823288649320602, -0.08744750916957855, 0.14417944848537445, 0.014636521227657795, 0.10712126642465591, 0.14313316345214844, 0.01343101728707552, 0.10255914181470871, -0.08983208239078522, -0.018939344212412834, 0.031209396198391914, 0.2135104089975357, -0.05208220332860947, 0.00838248711079359, -0.13684824109077454, -0.0256142970174551, 0.14601100981235504, 0.13798639178276062, 0.14503207802772522, 0.31421369314193726, 0.15292863547801971, 0.13410434126853943, 0.13474710285663605, 0.12333164364099503, 0.07403261214494705, 0.03444362059235573, -0.015304201282560825, -0.06035377085208893, -0.003846159903332591, 0.02816268615424633, 0.05421729013323784, 0.06724072247743607, -0.22906480729579926, 0.041139665991067886, -0.2661744952201843, 0.03544611483812332, -0.0854712724685669, 0.1161833181977272, -0.028890252113342285, 0.11051984131336212, 0.11386284977197647, 
0.05553818494081497, -0.023278791457414627, 0.16036942601203918, 0.032686375081539154, -0.07703183591365814, 0.020292721688747406, 0.024695809930562973, 0.06633034348487854, 0.08606193959712982, 0.09550496190786362, -0.020778406411409378, -0.1831783503293991, 0.025963841006159782, 0.12212833017110825, -0.20747940242290497, 0.289523184299469, 0.013651901856064796, -0.0743619054555893, -0.01690039224922657, -0.06958060711622238, 0.008433517068624496, 0.12829731404781342, 0.10406835377216339, 0.05508929491043091, -0.2613787055015564, -0.13299626111984253, 0.046764206141233444, -0.00873907096683979, 0.11356569826602936, -0.0052223424427211285, -0.14201195538043976, -0.06640999764204025, 0.05814211815595627, -0.006591420155018568, 0.13023322820663452, -0.018290361389517784, -0.08173255622386932, -0.010230090469121933, 0.055564697831869125, -0.001312803477048874, -0.04580084979534149, 0.07523149996995926, 0.009008137509226799, 0.02259289287030697, -0.08178020268678665, 0.03887253627181053, -0.08071476966142654, -0.25375792384147644, 0.019298138096928596, -0.04987313598394394, 0.004092312417924404, -0.04684043675661087, -0.15448936820030212, -0.1129264086484909, -0.15445278584957123, 0.13100723922252655, -0.03675999864935875, 0.091565802693367, -0.0817658007144928, 0.13736046850681305, -0.08521489799022675, 0.05375019088387489, 0.00614814180880785, 0.03918716683983803, -0.017955513671040535, -0.1031481996178627, 0.09334362298250198, -0.1874227225780487, 0.023863423615694046, 0.010427716188132763, -0.056847453117370605, -0.01354232057929039, 0.03918023407459259, -0.08763083070516586, 0.21879427134990692, 0.3331502079963684, -0.011948764324188232, 0.22546616196632385, 0.35863226652145386, -0.13763751089572906, -0.23258967697620392, -0.1205512136220932, -0.3263251483440399, -0.09005610644817352, 0.17321562767028809, -0.18057219684123993, 0.04850830137729645, 0.16150830686092377, -0.10868281871080399, 0.22499866783618927, -0.22723928093910217, -0.04793389141559601, 
0.1823979914188385, -0.038322996348142624, 0.4527989625930786, -0.1144307404756546, -0.1784561723470688, -0.03637253865599632, -0.16285361349582672, 0.12426037341356277, -0.026553882285952568, 0.06700495630502701, 0.02416347898542881, -0.011372359469532967, -0.009014161303639412, -0.04529716446995735, 0.2216065675020218, 0.0522729866206646, 0.10468899458646774, -0.09159468114376068, -0.17199653387069702, 0.1907423883676529, -0.0004908236442133784, -0.003372655250132084, -0.05411549657583237, -0.04850282520055771, -0.06871756166219711, 0.033092137426137924, -0.0334564633667469, 0.06195882335305214, 0.03364093229174614, -0.11903523653745651, -0.10248823463916779, 0.034111104905605316, -0.13155671954154968, -0.054850947111845016, 0.26421889662742615, -0.02080743946135044, 0.09609334170818329, 0.04959092289209366, -0.05474294349551201, -0.13538943231105804, 0.005736751481890678, -0.07534020394086838, -0.05711410939693451, 0.06573604047298431, -0.11453206837177277, -0.024341827258467674, 0.1293732225894928, -0.029497180134058, 0.09674722701311111, 0.08061115443706512, -0.07585363835096359, 0.02032829262316227, 0.15617427229881287, -0.07247176766395569, -0.10849180817604065, 0.04999847710132599, 0.04640531167387962, 0.17256882786750793, 0.004101871978491545, 0.02018604800105095, 0.08726977556943893, 0.045959215611219406, -0.007486662827432156, 0.007311292923986912, -0.11321697384119034, -0.04241771996021271, 0.0387241393327713, -0.005273692775517702, -0.10946331918239594, 0.16008898615837097, 0.056837860494852066, 0.004653505515307188, -0.06027700752019882, 0.09720424562692642, -0.06709636747837067, -0.07046061009168625, -0.1753035932779312, 0.018511172384023666, -0.12734080851078033, -0.09874535351991653, 0.06846235692501068, -0.09371624886989594, -0.04084605351090431, 0.08152704685926437, 0.046927981078624725, 0.14401860535144806, -0.006597559433430433, -0.023080874234437943, 0.149825319647789, -0.0884878933429718, -0.2241756170988083, 0.01969664730131626, 
-0.04083063453435898, -0.07065816223621368, -0.0007070365245454013, 0.06069544702768326, -0.0663156732916832, -0.11958606541156769, -0.20477768778800964, 0.10412076860666275, -0.12043121457099915, -0.03954985365271568, -0.1041841059923172, -0.053260523825883865, 0.07891252636909485, -0.02613759972155094, -0.04122013971209526, -0.047595683485269547, -0.16630595922470093, 0.054254453629255295, 0.07140932232141495, 0.11125344783067703, -0.0759999230504036, -0.018354382365942, 0.1398727148771286, 0.048581548035144806, 0.08479110151529312, 0.07578440010547638, 0.026255371049046516, 0.16728560626506805, -0.1708206981420517, -0.0542997270822525, 0.1068294569849968, -0.026716172695159912, 0.01994573324918747, 0.10631280392408371, -0.04839588701725006, 0.07042654603719711, -0.05095988139510155, 0.05859163776040077, -0.15704534947872162, -0.13073866069316864, -0.04184387996792793, 0.023728877305984497, -0.2260182797908783, 0.015071595087647438, -0.1769561767578125, 0.19692228734493256, -0.024228032678365707, 0.11490963399410248, 0.08052190393209457, 0.02052290178835392, 0.03539382666349411, -0.006019921973347664, 0.00946811307221651, -0.10524865239858627, -0.05784677714109421, -0.07560300827026367, -0.1168874129652977, -0.009665017947554588, 0.36614301800727844, 0.02430291846394539, -0.19682736694812775, 0.051222387701272964, 0.18285293877124786, 0.023639049381017685, -0.0073763905093073845, 0.26180747151374817, 0.08150359988212585, -0.023175053298473358, -0.1782374382019043, 0.0396091528236866, -0.08699734508991241, -0.15269799530506134, 0.11385007947683334, 0.09347525984048843, 0.05813581123948097, 0.022930078208446503, 0.10404518246650696, -0.035940010100603104, -0.05509711429476738, -0.13301853835582733, 0.13368983566761017, -0.001790675800293684, 0.0193882267922163, 0.0897885113954544, 0.19249756634235382, -0.045275162905454636, 0.05437124893069267, -0.07336640357971191, -0.001598604372702539, -0.15740543603897095, -0.13358698785305023, 0.06194563955068588, 
-0.08269550651311874, 0.06342913210391998, 0.050261519849300385, 0.04341990500688553, 0.31786394119262695, 0.039095040410757065, -0.046439893543720245, 0.003166865324601531, -0.14845187962055206, -0.08075450360774994, -0.06024569645524025, -0.03110554814338684, 0.028620192781090736, -0.13928957283496857, -0.09898591786623001, -0.06917677819728851, -0.130235955119133, -0.06539803743362427, 0.025270747020840645, 0.014251931570470333, -0.053083837032318115, -0.17625881731510162, -0.04808593541383743, -0.06644169986248016, 0.10105955600738525, -0.08462738990783691, 0.1516820639371872, 0.0022449472453445196, 0.030281953513622284, 0.07627002149820328, 0.09585131704807281, 0.018900424242019653, -0.06975197046995163, 0.05599058046936989, 0.12436293810606003, 0.01323844213038683, 0.1259988248348236, -0.06034265458583832, -0.019420607015490532, -0.014145253226161003, 0.14038437604904175, 0.304447740316391, -0.01856905221939087, -0.013814439997076988, -0.022110093384981155, 0.021388787776231766, 0.10893569141626358, 0.19800719618797302, -0.03437356278300285, 0.2551359534263611, -0.058974795043468475, 0.0756678432226181, -0.013180435635149479, -0.005362013820558786, -0.053146667778491974, 0.06074550002813339, 0.06268858164548874, -0.06877048313617706, -0.10191375762224197, 0.15178529918193817, -0.14985080063343048, 0.13306055963039398, 0.14678068459033966, -0.06057753041386604, 0.03797250986099243, 0.0007459368789568543, 0.19896264374256134, -0.03570213168859482, 0.0984780564904213, -0.10653308779001236, -0.10261140763759613, -0.14764924347400665, 0.037690844386816025, -0.36797797679901123, -0.1756322830915451, 0.11731542646884918, 0.14115898311138153, 0.1759258657693863, -0.012341637164354324, 0.056479312479496, 0.0033020609989762306, 0.08296097069978714, -0.04232487455010414, 0.1519634872674942, 0.0612073615193367, -0.017103128135204315, -0.15296664834022522, -0.20328094065189362, -0.0012039330322295427, -0.058561209589242935, 0.055583830922842026, -0.02269243635237217, 
0.025347469374537468, 0.07746459543704987, -0.06768939644098282, -0.029180381447076797, -0.02352982573211193, -0.13262848556041718, 0.052229251712560654, -0.04354005306959152, 0.0320255309343338, -0.03958037868142128, -0.022394726052880287, -0.039987675845623016, 0.10721533745527267, -0.22402705252170563, -0.08517231047153473, 0.1422796994447708, -0.03421911224722862, 0.1542559564113617, -0.02848726324737072, -0.12159585952758789, -0.024955326691269875, -0.06977712363004684, 0.10887379199266434, -0.1419300138950348, 0.038592495024204254, 0.13747453689575195, 0.008710617199540138, 0.031119761988520622, -0.2533661723136902, 0.050644006580114365, -0.03556957095861435, -0.016733208671212196, -0.057031940668821335 ]
538d97eb95e5d649a49899958220c5304ec5c385
# Dataset Card for Evaluation run of kwchoi/DPO_mistral_7b_alpaca_0124_v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [kwchoi/DPO_mistral_7b_alpaca_0124_v1](https://huggingface.co/kwchoi/DPO_mistral_7b_alpaca_0124_v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_kwchoi__DPO_mistral_7b_alpaca_0124_v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T05:43:08.624881](https://huggingface.co/datasets/open-llm-leaderboard/details_kwchoi__DPO_mistral_7b_alpaca_0124_v1/blob/main/results_2024-01-25T05-43-08.624881.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6018672167168544, "acc_stderr": 0.0332990954041992, "acc_norm": 0.6077114987019184, "acc_norm_stderr": 0.034011337774239765, "mc1": 0.5177478580171359, "mc1_stderr": 0.017492470843075356, "mc2": 0.6676115272078241, "mc2_stderr": 0.015347165777845506 }, "harness|arc:challenge|25": { "acc": 0.5989761092150171, "acc_stderr": 0.014322255790719867, "acc_norm": 0.6339590443686007, "acc_norm_stderr": 0.014077223108470142 }, "harness|hellaswag|10": { "acc": 0.5954989046006771, "acc_stderr": 0.004897921845492105, "acc_norm": 0.7320254929296953, "acc_norm_stderr": 0.004419990741915989 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.04688261722621503, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621503 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5925925925925926, "acc_stderr": 0.04244633238353228, "acc_norm": 0.5925925925925926, "acc_norm_stderr": 0.04244633238353228 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.618421052631579, "acc_stderr": 0.03953173377749194, "acc_norm": 0.618421052631579, "acc_norm_stderr": 0.03953173377749194 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6754716981132075, "acc_stderr": 0.02881561571343211, "acc_norm": 0.6754716981132075, "acc_norm_stderr": 0.02881561571343211 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6458333333333334, "acc_stderr": 0.039994111357535424, "acc_norm": 0.6458333333333334, "acc_norm_stderr": 0.039994111357535424 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.42, "acc_stderr": 0.04960449637488584, "acc_norm": 0.42, "acc_norm_stderr": 0.04960449637488584 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6473988439306358, "acc_stderr": 0.036430371689585475, "acc_norm": 0.6473988439306358, "acc_norm_stderr": 0.036430371689585475 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.45098039215686275, "acc_stderr": 0.04951218252396262, "acc_norm": 0.45098039215686275, "acc_norm_stderr": 0.04951218252396262 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5106382978723404, "acc_stderr": 0.03267862331014063, "acc_norm": 0.5106382978723404, "acc_norm_stderr": 0.03267862331014063 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.40350877192982454, "acc_stderr": 0.046151869625837026, "acc_norm": 0.40350877192982454, "acc_norm_stderr": 0.046151869625837026 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5862068965517241, "acc_stderr": 0.04104269211806232, "acc_norm": 0.5862068965517241, "acc_norm_stderr": 0.04104269211806232 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.35714285714285715, "acc_stderr": 0.024677862841332786, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.024677862841332786 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.40476190476190477, "acc_stderr": 0.04390259265377562, "acc_norm": 0.40476190476190477, "acc_norm_stderr": 0.04390259265377562 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6741935483870968, "acc_stderr": 0.026662010578567107, "acc_norm": 0.6741935483870968, "acc_norm_stderr": 0.026662010578567107 }, "harness|hendrycksTest-high_school_chemistry|5": { 
"acc": 0.4729064039408867, "acc_stderr": 0.03512819077876106, "acc_norm": 0.4729064039408867, "acc_norm_stderr": 0.03512819077876106 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7272727272727273, "acc_stderr": 0.03477691162163659, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.03477691162163659 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7525252525252525, "acc_stderr": 0.03074630074212449, "acc_norm": 0.7525252525252525, "acc_norm_stderr": 0.03074630074212449 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.844559585492228, "acc_stderr": 0.026148483469153303, "acc_norm": 0.844559585492228, "acc_norm_stderr": 0.026148483469153303 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5641025641025641, "acc_stderr": 0.025141801511177495, "acc_norm": 0.5641025641025641, "acc_norm_stderr": 0.025141801511177495 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34074074074074073, "acc_stderr": 0.028897748741131147, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.028897748741131147 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6638655462184874, "acc_stderr": 0.03068473711513536, "acc_norm": 0.6638655462184874, "acc_norm_stderr": 0.03068473711513536 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.038020397601079024, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.038020397601079024 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7779816513761468, "acc_stderr": 0.017818849564796648, "acc_norm": 0.7779816513761468, "acc_norm_stderr": 0.017818849564796648 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4861111111111111, "acc_stderr": 0.03408655867977748, "acc_norm": 0.4861111111111111, 
"acc_norm_stderr": 0.03408655867977748 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7990196078431373, "acc_stderr": 0.028125972265654373, "acc_norm": 0.7990196078431373, "acc_norm_stderr": 0.028125972265654373 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7383966244725738, "acc_stderr": 0.028609516716994934, "acc_norm": 0.7383966244725738, "acc_norm_stderr": 0.028609516716994934 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6188340807174888, "acc_stderr": 0.03259625118416827, "acc_norm": 0.6188340807174888, "acc_norm_stderr": 0.03259625118416827 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7022900763358778, "acc_stderr": 0.040103589424622034, "acc_norm": 0.7022900763358778, "acc_norm_stderr": 0.040103589424622034 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7129629629629629, "acc_stderr": 0.04373313040914761, "acc_norm": 0.7129629629629629, "acc_norm_stderr": 0.04373313040914761 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7300613496932515, "acc_stderr": 0.03487825168497892, "acc_norm": 0.7300613496932515, "acc_norm_stderr": 0.03487825168497892 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.45535714285714285, "acc_stderr": 0.04726835553719099, "acc_norm": 0.45535714285714285, "acc_norm_stderr": 0.04726835553719099 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8632478632478633, "acc_stderr": 0.022509033937077795, "acc_norm": 0.8632478632478633, "acc_norm_stderr": 0.022509033937077795 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.046882617226215034, "acc_norm": 0.68, 
"acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.776500638569604, "acc_stderr": 0.01489723522945071, "acc_norm": 0.776500638569604, "acc_norm_stderr": 0.01489723522945071 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6705202312138728, "acc_stderr": 0.025305258131879706, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.025305258131879706 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.36983240223463687, "acc_stderr": 0.01614588125605621, "acc_norm": 0.36983240223463687, "acc_norm_stderr": 0.01614588125605621 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6797385620915033, "acc_stderr": 0.02671611838015685, "acc_norm": 0.6797385620915033, "acc_norm_stderr": 0.02671611838015685 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6816720257234726, "acc_stderr": 0.026457225067811025, "acc_norm": 0.6816720257234726, "acc_norm_stderr": 0.026457225067811025 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6759259259259259, "acc_stderr": 0.02604176620271716, "acc_norm": 0.6759259259259259, "acc_norm_stderr": 0.02604176620271716 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4432624113475177, "acc_stderr": 0.029634838473766002, "acc_norm": 0.4432624113475177, "acc_norm_stderr": 0.029634838473766002 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.41590612777053454, "acc_stderr": 0.01258832385031362, "acc_norm": 0.41590612777053454, "acc_norm_stderr": 0.01258832385031362 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6029411764705882, "acc_stderr": 0.02972215209928006, "acc_norm": 0.6029411764705882, "acc_norm_stderr": 0.02972215209928006 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5980392156862745, "acc_stderr": 0.019835176484375387, "acc_norm": 0.5980392156862745, "acc_norm_stderr": 0.019835176484375387 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.04494290866252091, "acc_norm": 
0.6727272727272727, "acc_norm_stderr": 0.04494290866252091 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7142857142857143, "acc_stderr": 0.028920583220675606, "acc_norm": 0.7142857142857143, "acc_norm_stderr": 0.028920583220675606 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7014925373134329, "acc_stderr": 0.032357437893550424, "acc_norm": 0.7014925373134329, "acc_norm_stderr": 0.032357437893550424 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774708, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774708 }, "harness|hendrycksTest-virology|5": { "acc": 0.4759036144578313, "acc_stderr": 0.038879718495972646, "acc_norm": 0.4759036144578313, "acc_norm_stderr": 0.038879718495972646 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8128654970760234, "acc_stderr": 0.02991312723236804, "acc_norm": 0.8128654970760234, "acc_norm_stderr": 0.02991312723236804 }, "harness|truthfulqa:mc|0": { "mc1": 0.5177478580171359, "mc1_stderr": 0.017492470843075356, "mc2": 0.6676115272078241, "mc2_stderr": 0.015347165777845506 }, "harness|winogrande|5": { "acc": 0.7719021310181531, "acc_stderr": 0.011793015817663594 }, "harness|gsm8k|5": { "acc": 0.2585291887793783, "acc_stderr": 0.012059911372516123 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_kwchoi__DPO_mistral_7b_alpaca_0124_v1
[ "region:us" ]
2024-01-25T05:45:29+00:00
{"pretty_name": "Evaluation run of kwchoi/DPO_mistral_7b_alpaca_0124_v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [kwchoi/DPO_mistral_7b_alpaca_0124_v1](https://huggingface.co/kwchoi/DPO_mistral_7b_alpaca_0124_v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_kwchoi__DPO_mistral_7b_alpaca_0124_v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T05:43:08.624881](https://huggingface.co/datasets/open-llm-leaderboard/details_kwchoi__DPO_mistral_7b_alpaca_0124_v1/blob/main/results_2024-01-25T05-43-08.624881.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6018672167168544,\n \"acc_stderr\": 0.0332990954041992,\n \"acc_norm\": 0.6077114987019184,\n \"acc_norm_stderr\": 0.034011337774239765,\n \"mc1\": 0.5177478580171359,\n \"mc1_stderr\": 0.017492470843075356,\n \"mc2\": 0.6676115272078241,\n \"mc2_stderr\": 0.015347165777845506\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5989761092150171,\n \"acc_stderr\": 0.014322255790719867,\n \"acc_norm\": 0.6339590443686007,\n \"acc_norm_stderr\": 0.014077223108470142\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5954989046006771,\n \"acc_stderr\": 0.004897921845492105,\n \"acc_norm\": 0.7320254929296953,\n \"acc_norm_stderr\": 0.004419990741915989\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621503,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621503\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5925925925925926,\n \"acc_stderr\": 0.04244633238353228,\n \"acc_norm\": 0.5925925925925926,\n \"acc_norm_stderr\": 0.04244633238353228\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.618421052631579,\n \"acc_stderr\": 0.03953173377749194,\n \"acc_norm\": 0.618421052631579,\n \"acc_norm_stderr\": 0.03953173377749194\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6754716981132075,\n \"acc_stderr\": 0.02881561571343211,\n \"acc_norm\": 0.6754716981132075,\n \"acc_norm_stderr\": 0.02881561571343211\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6458333333333334,\n \"acc_stderr\": 0.039994111357535424,\n \"acc_norm\": 0.6458333333333334,\n \"acc_norm_stderr\": 0.039994111357535424\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n 
\"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.04960449637488584,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.04960449637488584\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6473988439306358,\n \"acc_stderr\": 0.036430371689585475,\n \"acc_norm\": 0.6473988439306358,\n \"acc_norm_stderr\": 0.036430371689585475\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.45098039215686275,\n \"acc_stderr\": 0.04951218252396262,\n \"acc_norm\": 0.45098039215686275,\n \"acc_norm_stderr\": 0.04951218252396262\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5106382978723404,\n \"acc_stderr\": 0.03267862331014063,\n \"acc_norm\": 0.5106382978723404,\n \"acc_norm_stderr\": 0.03267862331014063\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.40350877192982454,\n \"acc_stderr\": 0.046151869625837026,\n \"acc_norm\": 0.40350877192982454,\n \"acc_norm_stderr\": 0.046151869625837026\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5862068965517241,\n \"acc_stderr\": 0.04104269211806232,\n \"acc_norm\": 0.5862068965517241,\n \"acc_norm_stderr\": 0.04104269211806232\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.35714285714285715,\n \"acc_stderr\": 0.024677862841332786,\n \"acc_norm\": 0.35714285714285715,\n \"acc_norm_stderr\": 0.024677862841332786\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.40476190476190477,\n \"acc_stderr\": 0.04390259265377562,\n \"acc_norm\": 
0.40476190476190477,\n \"acc_norm_stderr\": 0.04390259265377562\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6741935483870968,\n \"acc_stderr\": 0.026662010578567107,\n \"acc_norm\": 0.6741935483870968,\n \"acc_norm_stderr\": 0.026662010578567107\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4729064039408867,\n \"acc_stderr\": 0.03512819077876106,\n \"acc_norm\": 0.4729064039408867,\n \"acc_norm_stderr\": 0.03512819077876106\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.03477691162163659,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.03477691162163659\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7525252525252525,\n \"acc_stderr\": 0.03074630074212449,\n \"acc_norm\": 0.7525252525252525,\n \"acc_norm_stderr\": 0.03074630074212449\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.844559585492228,\n \"acc_stderr\": 0.026148483469153303,\n \"acc_norm\": 0.844559585492228,\n \"acc_norm_stderr\": 0.026148483469153303\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5641025641025641,\n \"acc_stderr\": 0.025141801511177495,\n \"acc_norm\": 0.5641025641025641,\n \"acc_norm_stderr\": 0.025141801511177495\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34074074074074073,\n \"acc_stderr\": 0.028897748741131147,\n \"acc_norm\": 0.34074074074074073,\n \"acc_norm_stderr\": 0.028897748741131147\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n 
\"acc\": 0.6638655462184874,\n \"acc_stderr\": 0.03068473711513536,\n \"acc_norm\": 0.6638655462184874,\n \"acc_norm_stderr\": 0.03068473711513536\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7779816513761468,\n \"acc_stderr\": 0.017818849564796648,\n \"acc_norm\": 0.7779816513761468,\n \"acc_norm_stderr\": 0.017818849564796648\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4861111111111111,\n \"acc_stderr\": 0.03408655867977748,\n \"acc_norm\": 0.4861111111111111,\n \"acc_norm_stderr\": 0.03408655867977748\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7990196078431373,\n \"acc_stderr\": 0.028125972265654373,\n \"acc_norm\": 0.7990196078431373,\n \"acc_norm_stderr\": 0.028125972265654373\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7383966244725738,\n \"acc_stderr\": 0.028609516716994934,\n \"acc_norm\": 0.7383966244725738,\n \"acc_norm_stderr\": 0.028609516716994934\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6188340807174888,\n \"acc_stderr\": 0.03259625118416827,\n \"acc_norm\": 0.6188340807174888,\n \"acc_norm_stderr\": 0.03259625118416827\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7022900763358778,\n \"acc_stderr\": 0.040103589424622034,\n \"acc_norm\": 0.7022900763358778,\n \"acc_norm_stderr\": 0.040103589424622034\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7129629629629629,\n \"acc_stderr\": 0.04373313040914761,\n \"acc_norm\": 0.7129629629629629,\n \"acc_norm_stderr\": 
0.04373313040914761\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7300613496932515,\n \"acc_stderr\": 0.03487825168497892,\n \"acc_norm\": 0.7300613496932515,\n \"acc_norm_stderr\": 0.03487825168497892\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n \"acc_stderr\": 0.04726835553719099,\n \"acc_norm\": 0.45535714285714285,\n \"acc_norm_stderr\": 0.04726835553719099\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8632478632478633,\n \"acc_stderr\": 0.022509033937077795,\n \"acc_norm\": 0.8632478632478633,\n \"acc_norm_stderr\": 0.022509033937077795\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.776500638569604,\n \"acc_stderr\": 0.01489723522945071,\n \"acc_norm\": 0.776500638569604,\n \"acc_norm_stderr\": 0.01489723522945071\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.025305258131879706,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.025305258131879706\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.36983240223463687,\n \"acc_stderr\": 0.01614588125605621,\n \"acc_norm\": 0.36983240223463687,\n \"acc_norm_stderr\": 0.01614588125605621\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6797385620915033,\n \"acc_stderr\": 0.02671611838015685,\n \"acc_norm\": 0.6797385620915033,\n \"acc_norm_stderr\": 0.02671611838015685\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6816720257234726,\n \"acc_stderr\": 0.026457225067811025,\n \"acc_norm\": 0.6816720257234726,\n \"acc_norm_stderr\": 
0.026457225067811025\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6759259259259259,\n \"acc_stderr\": 0.02604176620271716,\n \"acc_norm\": 0.6759259259259259,\n \"acc_norm_stderr\": 0.02604176620271716\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4432624113475177,\n \"acc_stderr\": 0.029634838473766002,\n \"acc_norm\": 0.4432624113475177,\n \"acc_norm_stderr\": 0.029634838473766002\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.41590612777053454,\n \"acc_stderr\": 0.01258832385031362,\n \"acc_norm\": 0.41590612777053454,\n \"acc_norm_stderr\": 0.01258832385031362\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6029411764705882,\n \"acc_stderr\": 0.02972215209928006,\n \"acc_norm\": 0.6029411764705882,\n \"acc_norm_stderr\": 0.02972215209928006\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5980392156862745,\n \"acc_stderr\": 0.019835176484375387,\n \"acc_norm\": 0.5980392156862745,\n \"acc_norm_stderr\": 0.019835176484375387\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.04494290866252091,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.04494290866252091\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.028920583220675606,\n \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.028920583220675606\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7014925373134329,\n \"acc_stderr\": 0.032357437893550424,\n \"acc_norm\": 0.7014925373134329,\n \"acc_norm_stderr\": 0.032357437893550424\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774708,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774708\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4759036144578313,\n \"acc_stderr\": 0.038879718495972646,\n \"acc_norm\": 
0.4759036144578313,\n \"acc_norm_stderr\": 0.038879718495972646\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8128654970760234,\n \"acc_stderr\": 0.02991312723236804,\n \"acc_norm\": 0.8128654970760234,\n \"acc_norm_stderr\": 0.02991312723236804\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5177478580171359,\n \"mc1_stderr\": 0.017492470843075356,\n \"mc2\": 0.6676115272078241,\n \"mc2_stderr\": 0.015347165777845506\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7719021310181531,\n \"acc_stderr\": 0.011793015817663594\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.2585291887793783,\n \"acc_stderr\": 0.012059911372516123\n }\n}\n```", "repo_url": "https://huggingface.co/kwchoi/DPO_mistral_7b_alpaca_0124_v1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|arc:challenge|25_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|gsm8k|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hellaswag|10_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-43-08.624881.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-43-08.624881.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-43-08.624881.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-43-08.624881.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-43-08.624881.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-43-08.624881.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T05-43-08.624881.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T05-43-08.624881.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["**/details_harness|winogrande|5_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T05-43-08.624881.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T05_43_08.624881", "path": ["results_2024-01-25T05-43-08.624881.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T05-43-08.624881.parquet"]}]}]}
2024-01-25T05:45:51+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of kwchoi/DPO_mistral_7b_alpaca_0124_v1 Dataset automatically created during the evaluation run of model kwchoi/DPO_mistral_7b_alpaca_0124_v1 on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T05:43:08.624881(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of kwchoi/DPO_mistral_7b_alpaca_0124_v1\n\n\n\nDataset automatically created during the evaluation run of model kwchoi/DPO_mistral_7b_alpaca_0124_v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T05:43:08.624881(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of kwchoi/DPO_mistral_7b_alpaca_0124_v1\n\n\n\nDataset automatically created during the evaluation run of model kwchoi/DPO_mistral_7b_alpaca_0124_v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T05:43:08.624881(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 203, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of kwchoi/DPO_mistral_7b_alpaca_0124_v1\n\n\n\nDataset automatically created during the evaluation run of model kwchoi/DPO_mistral_7b_alpaca_0124_v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T05:43:08.624881(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
[ -0.07929955422878265, 0.19961699843406677, -0.005785547662526369, 0.023030798882246017, 0.07245246320962906, -0.024715961888432503, 0.02451341226696968, 0.12907879054546356, 0.028193572536110878, 0.16867975890636444, -0.01608462445437908, 0.09117666631937027, 0.08751208335161209, 0.18200376629829407, 0.013732331804931164, -0.14931046962738037, 0.024844253435730934, -0.07701606303453445, 0.07169285416603088, 0.07843413203954697, 0.08518766611814499, -0.09360437095165253, 0.05507903918623924, -0.015723226591944695, -0.027655061334371567, -0.014993144199252129, -0.08614439517259598, -0.04377080500125885, 0.08236654102802277, 0.06604170799255371, 0.030611176043748856, -0.010033860802650452, 0.0015284622786566615, -0.24752523005008698, 0.021430937573313713, 0.07658732682466507, 0.03635134920477867, 0.06934047490358353, 0.13431747257709503, -0.06493854522705078, 0.08682450652122498, -0.0767645314335823, 0.05054435133934021, 0.03517431020736694, -0.11231785267591476, -0.08230236917734146, -0.1676696389913559, 0.018828092142939568, 0.08219127357006073, 0.05688473954796791, -0.02556629292666912, 0.11698699742555618, -0.010556916706264019, 0.0269315242767334, 0.1536562144756317, -0.1307610273361206, -0.024792643263936043, -0.015899335965514183, 0.03599252179265022, 0.030998604372143745, -0.1050734594464302, -0.033049263060092926, 0.02463473193347454, 0.03544803336262703, -0.011545995250344276, 0.01205418910831213, -0.00544471712782979, 0.011068884283304214, -0.13090990483760834, -0.0799773633480072, 0.12803849577903748, -0.002775863278657198, -0.058519888669252396, -0.1378462016582489, -0.022512251511216164, -0.0556505061686039, -0.002242487855255604, -0.016545042395591736, 0.018225235864520073, -0.02303476631641388, 0.06057589873671532, 0.0003980433102697134, -0.09102804958820343, -0.019833166152238846, -0.04057413712143898, 0.07314480096101761, 0.019508589059114456, -0.013197281397879124, -0.007306886371225119, 0.13140669465065002, 0.027882050722837448, 
-0.10335295647382736, -0.11733388900756836, -0.03883752226829529, -0.0963498055934906, -0.05398693308234215, 0.00356526137329638, -0.05375487729907036, 0.04159657284617424, 0.23399129509925842, -0.06420144438743591, 0.013226520270109177, -0.07793176919221878, -0.007426226511597633, 0.11066089570522308, 0.08358431607484818, -0.03083106130361557, -0.03948653116822243, -0.016634831205010414, 0.04351019114255905, 0.014376100152730942, -0.008396743796765804, 0.03624092414975166, 0.0589626170694828, 0.041175127029418945, 0.1268121600151062, 0.12146143615245819, 0.01411289069801569, -0.06924314796924591, -0.027273444458842278, 0.18686066567897797, -0.17917992174625397, -0.008992604911327362, 0.0072973729111254215, -0.053605783730745316, -0.0932931900024414, 0.04992819204926491, -0.006095870863646269, -0.051411256194114685, 0.1089579239487648, -0.06428854912519455, -0.05482986569404602, -0.08134712278842926, -0.036314982920885086, 0.0577424056828022, -0.011388739570975304, -0.04050305113196373, -0.0685814619064331, -0.12764063477516174, -0.0930599570274353, -0.005855246912688017, -0.07563371956348419, -0.011530283838510513, 0.02854287624359131, -0.013681914657354355, -0.02069343812763691, -0.018791556358337402, 0.12915581464767456, -0.07315797358751297, 0.02579793520271778, 0.0009698436479084194, -0.0007984045660123229, 0.08882635086774826, 0.04790028929710388, -0.13012686371803284, 0.08638352900743484, -0.08665812015533447, 0.11744154244661331, -0.09327854216098785, 0.004174253903329372, -0.1305573284626007, -0.0005363237578421831, -0.05176708847284317, 0.0022714249789714813, 0.02604459598660469, 0.11263984441757202, -0.2530358135700226, 0.004624819383025169, 0.11750325560569763, -0.10206426680088043, -0.0940600112080574, 0.037053003907203674, -0.03062189184129238, 0.05468283221125603, 0.058920297771692276, 0.0829271450638771, 0.12250708043575287, -0.06598471850156784, -0.14296533167362213, -0.10058322548866272, 0.0026327299419790506, 0.13499906659126282, 
0.057047903537750244, -0.071681447327137, 0.15096555650234222, 0.03563940152525902, -0.032521989196538925, -0.045733172446489334, -0.022978810593485832, -0.05273482948541641, -0.01173147838562727, -0.05323440581560135, -0.08690573275089264, -0.011869380250573158, -0.05856224521994591, -0.02546878345310688, -0.07919888198375702, 0.05094402655959129, 0.10221413522958755, 0.017586084082722664, 0.012081749737262726, -0.08406861126422882, 0.06822055578231812, 0.0041326237842440605, 0.014361314475536346, -0.23169510066509247, -0.09557560831308365, 0.04053588584065437, -0.1366177797317505, 0.022381218150258064, -0.002899212297052145, 0.011588510125875473, 0.027278058230876923, 0.004745724610984325, 0.014295178465545177, -0.008268224075436592, -0.004116902127861977, -0.029786821454763412, -0.12772195041179657, -0.035473182797431946, -0.08166053146123886, 0.05640624091029167, -0.1271902322769165, -0.029522748664021492, 0.1038079708814621, 0.18065409362316132, 0.025673478841781616, -0.09810036420822144, 0.06659108400344849, 0.007115242071449757, -0.03747543692588806, -0.07061276584863663, -0.004616744350641966, -0.013213622383773327, 0.06150275841355324, 0.06326159089803696, -0.18078000843524933, -0.14403283596038818, 0.06868299841880798, 0.1657201647758484, -0.06485585123300552, -0.04908593371510506, -0.09748382866382599, -0.047163840383291245, -0.08910074084997177, -0.04831622913479805, 0.0935451090335846, 0.08134624361991882, 0.019948141649365425, -0.06039264425635338, -0.09943296015262604, -0.012395773082971573, 0.07406900078058243, -0.05680178105831146, 0.09393329918384552, 0.033476538956165314, -0.10019610822200775, 0.08944027125835419, 0.027308352291584015, 0.1345098316669464, 0.10002393275499344, -0.01732119359076023, -0.13097457587718964, -0.005753492470830679, 0.06545847654342651, 0.03230718523263931, 0.0879969596862793, 0.0007356953574344516, 0.023688217625021935, 0.0761246606707573, -0.0062657310627400875, 0.04320482537150383, -0.06193476915359497, 
0.03992782533168793, 0.02444261498749256, -0.00823246967047453, 0.019692644476890564, 0.02677382528781891, 0.017069993540644646, 0.07012540847063065, 0.05000260844826698, 0.10776416212320328, -0.026694731786847115, -0.04309771955013275, -0.08596011251211166, 0.14565901458263397, -0.08976346999406815, -0.21760377287864685, -0.1292308270931244, -0.029913509264588356, -0.003738964209333062, -0.012489612214267254, 0.05162685364484787, -0.005204026587307453, -0.08805838227272034, -0.1035550907254219, 0.03727058693766594, 0.05983014032244682, -0.09268601983785629, -0.008267582394182682, 0.02660345286130905, 0.024957289919257164, -0.16352927684783936, 0.03587311506271362, 0.0318654328584671, -0.017567962408065796, -0.013634144328534603, 0.10733215510845184, 0.11923534423112869, 0.07072359323501587, 0.0471966415643692, -0.03256713226437569, -0.0022919801995158195, 0.19164785742759705, -0.10094583034515381, 0.016647741198539734, 0.09879973530769348, -0.0579431876540184, 0.05909476801753044, 0.1324951946735382, -0.0050099436193704605, -0.09251895546913147, 0.040336787700653076, 0.09385939687490463, -0.0443756990134716, -0.2718459367752075, -0.06337670981884003, -0.030580686405301094, -0.004658655263483524, 0.08739695698022842, 0.0816284567117691, -0.025116287171840668, 0.007680602837353945, -0.11190702021121979, -0.03948689624667168, -0.0643514096736908, 0.05891234055161476, 0.04105007275938988, 0.01969660259783268, 0.045459482818841934, -0.04740811139345169, 0.04531219229102135, 0.10939056426286697, 0.026033665984869003, 0.18034382164478302, -0.0571342296898365, 0.15950588881969452, 0.09294396638870239, 0.08066342025995255, -0.02365446463227272, 0.061337750405073166, 0.002287287497892976, 0.059093672782182693, 0.014210745692253113, -0.0988011509180069, -0.04133233055472374, 0.0755707398056984, -0.0052362000569701195, -0.03369249403476715, 0.039583854377269745, -0.040954187512397766, 0.059703875333070755, 0.20608735084533691, 0.014557018876075745, -0.148872509598732, 
-0.08661594986915588, 0.046680748462677, -0.04648232460021973, -0.09281639754772186, -0.03231821209192276, 0.05080531910061836, -0.12730036675930023, 0.028687814250588417, -0.033748164772987366, 0.08324401080608368, -0.12914051115512848, -0.022391216829419136, 0.008971104398369789, 0.07767081260681152, -0.021462498232722282, 0.08907921612262726, -0.13199816644191742, 0.10025572031736374, -0.006556734908372164, 0.0608254000544548, -0.07343058288097382, 0.058240365236997604, 0.000887702452018857, -0.08212689310312271, 0.11536610871553421, 0.00673121539875865, -0.0972120612859726, -0.03768985718488693, -0.13522173464298248, 0.004538387525826693, 0.058576855808496475, -0.12976323068141937, 0.11446554958820343, 0.02691911719739437, -0.015491221100091934, -0.03502269089221954, -0.039643462747335434, -0.12128295004367828, -0.1917070746421814, 0.13184913992881775, -0.13074898719787598, 0.0896928459405899, -0.06661295145750046, -0.05293743312358856, -0.034226082265377045, 0.16805684566497803, -0.0952172577381134, -0.07711061090230942, -0.12277337163686752, 0.04543635621666908, 0.17843739688396454, -0.06705418229103088, 0.057845525443553925, -0.02850872278213501, 0.1853799819946289, 0.00815619621425867, -0.06114304065704346, 0.0023589932825416327, -0.0722850114107132, -0.18204519152641296, -0.04013893008232117, 0.13402895629405975, 0.06575857102870941, -0.006534819956868887, -0.004143624100834131, 0.05574759095907211, 0.007234510965645313, -0.08463063091039658, 0.04406621679663658, 0.07062359154224396, 0.09938959032297134, 0.019812695682048798, -0.03817746043205261, -0.0916953757405281, -0.12681365013122559, -0.09187193214893341, 0.04440249502658844, 0.15995165705680847, -0.04218265041708946, 0.13483954966068268, 0.08657864481210709, -0.09098423272371292, -0.16318555176258087, -0.03665950149297714, 0.036807041615247726, -0.014460469596087933, 0.09761255234479904, -0.1845596879720688, 0.06777754426002502, 0.08911491185426712, -0.0235680490732193, 0.16921474039554596, 
-0.2182852029800415, -0.1495068371295929, 0.03467971086502075, 0.01926645264029503, -0.18644104897975922, -0.14817515015602112, -0.12432047724723816, -0.005376368761062622, -0.154403418302536, 0.12766960263252258, 0.009880859404802322, 0.02428271993994713, -0.024902893230319023, 0.06061637029051781, 0.04749254137277603, -0.05138546600937843, 0.13506340980529785, -0.0012034132378175855, 0.0156514011323452, -0.09533945471048355, -0.03091198205947876, -0.01769561879336834, -0.06698434054851532, 0.058220915496349335, 0.030677497386932373, 0.07296650111675262, -0.10249470174312592, -0.029219860211014748, -0.04672854393720627, 0.058853842318058014, -0.05768933147192001, -0.05121467635035515, -0.05383560433983803, 0.0744992047548294, 0.052997127175331116, -0.016974151134490967, 0.0391061007976532, -0.028406180441379547, 0.06326302886009216, 0.1797838658094406, 0.06635493040084839, 0.05315106734633446, -0.08145611733198166, -0.020200207829475403, 0.0003978907479904592, -0.006355250254273415, -0.12226743996143341, 0.03476552292704582, 0.10743783414363861, 0.05433488264679909, 0.06902311742305756, -0.018101485446095467, -0.18791839480400085, 0.01084410771727562, 0.09041154384613037, -0.10710388422012329, -0.1748107522726059, 0.0267894696444273, 0.12640532851219177, -0.1436697393655777, -0.05184917151927948, 0.09718851000070572, 0.022171439602971077, -0.038869377225637436, 0.013627992942929268, 0.06957313418388367, 0.04464154317975044, 0.12409396469593048, -0.015010586939752102, 0.05338350683450699, -0.07290428131818771, 0.11218642443418503, 0.11707252264022827, -0.0904717966914177, 0.006409998517483473, 0.08542973548173904, -0.0646464005112648, -0.04946859925985336, -0.0026653928216546774, 0.03402825817465782, -0.010132623836398125, -0.02065490186214447, 0.003098484128713608, -0.044782403856515884, 0.07917458564043045, 0.15500012040138245, -0.007654902059584856, 0.06696970760822296, 0.03389834985136986, 0.002320648869499564, -0.0471765473484993, 0.11973521113395691, 
0.05045203119516373, 0.05494963750243187, -0.03088931366801262, 0.021389106288552284, -0.002112533198669553, 0.0004052081785630435, 0.015546688809990883, -0.0376911535859108, -0.029702357947826385, -0.0219450443983078, -0.19410043954849243, 0.012108570896089077, -0.09282681345939636, -0.021955182775855064, -0.014342726208269596, -0.03654365614056587, -0.019544130191206932, 0.030961671844124794, -0.06877179443836212, -0.06887278705835342, -0.06553053855895996, 0.09516607969999313, -0.1870773881673813, 0.004869646392762661, 0.08419323712587357, -0.0842069536447525, 0.08774245530366898, 0.0405154787003994, 0.004639640916138887, 0.004721200559288263, -0.07878571003675461, -0.020193930715322495, -0.017993228510022163, 0.03752947598695755, 0.028907541185617447, -0.17168404161930084, -0.011985929682850838, 0.009550105780363083, -0.061708543449640274, -0.019527358934283257, 0.047855131328105927, -0.15582001209259033, 0.012391632422804832, 0.06378021836280823, -0.010965687222778797, -0.03371954336762428, 0.02528623677790165, 0.07353942096233368, 0.02126540243625641, 0.09932911396026611, -0.005853552836924791, 0.03552514314651489, -0.16572922468185425, -0.040354955941438675, -0.0023901411332190037, -0.00017975244554691017, 0.027402866631746292, 0.0354623906314373, 0.041840292513370514, -0.02137354202568531, 0.18460537493228912, -0.06267077475786209, 0.04462717846035957, 0.05234205722808838, -0.016951775178313255, -0.07956121861934662, 0.041416723281145096, 0.07747745513916016, 0.028168128803372383, 0.010196628980338573, 0.002534026512876153, -0.04084184020757675, -0.044478483498096466, -0.06536846607923508, 0.09230818599462509, 0.16305819153785706, 0.16944772005081177, -0.011182730086147785, 0.06603717058897018, -0.16449891030788422, -0.06973595172166824, 0.007721886970102787, -0.0695933923125267, 0.03935101255774498, -0.05707399919629097, 0.07041126489639282, 0.079852394759655, -0.12534396350383759, 0.09914176911115646, -0.06888122111558914, -0.02710009180009365, 
-0.022805171087384224, -0.12682849168777466, -0.02805473282933235, 0.0059722294099628925, 0.007513764780014753, -0.09022828191518784, 0.10818277299404144, 0.10024886578321457, -0.004878041800111532, -0.011750170029699802, 0.11427423357963562, -0.08595983684062958, -0.07948228716850281, 0.006146223749965429, 0.014637976884841919, 0.021189378574490547, 0.008639488369226456, 0.05679136887192726, 0.019184021279215813, 0.0759064257144928, 0.07330186665058136, 0.07301004976034164, 0.04993979260325432, 0.031031375750899315, -0.016937263309955597, -0.06048983708024025, 0.02146589197218418, -0.007889640517532825, -0.04800000041723251, 0.17131857573986053, 0.051976755261421204, 0.04300786182284355, 0.020861726254224777, 0.22576123476028442, 0.004743433091789484, -0.05812528356909752, -0.13042199611663818, 0.0937366709113121, 0.022618109360337257, 0.010713913477957249, 0.030084002763032913, -0.15000255405902863, 0.027793342247605324, 0.15098518133163452, 0.057473573833703995, 0.03986497223377228, 0.006113734561949968, 0.03229076787829399, 0.02465333230793476, -0.0343976765871048, 0.018354065716266632, 0.06267642229795456, 0.17457589507102966, -0.036542851477861404, 0.04591871425509453, 0.001077197608537972, -0.02285989560186863, -0.009007202461361885, 0.09102193266153336, -0.05544004589319229, 0.017230143770575523, -0.07160978019237518, 0.08593295514583588, -0.06500602513551712, -0.3156191408634186, 0.0038123163394629955, -0.0962110236287117, -0.15712635219097137, -0.01760047860443592, 0.04018200933933258, -0.03077739104628563, 0.03717997670173645, 0.04546177387237549, -0.024595191702246666, 0.17552758753299713, 0.015048246830701828, -0.04953671991825104, -0.07287275791168213, 0.07702882587909698, -0.03142203763127327, 0.21651257574558258, -0.006632134318351746, 0.032547980546951294, 0.09336884319782257, 0.008696814067661762, -0.19910769164562225, 0.023062050342559814, 0.08582352101802826, -0.08039306849241257, 0.05283137038350105, 0.18023565411567688, -0.011144531890749931, 
0.07870607078075409, 0.06744552403688431, 0.01660001277923584, 0.040196556597948074, 0.07073066383600235, 0.034543026238679886, -0.10563384741544724, 0.050382744520902634, -0.07927335053682327, 0.13748948276042938, 0.12588250637054443, -0.03339626267552376, 0.021127590909600258, -0.05503159016370773, 0.06115363538265228, -0.022181330248713493, 0.1067345142364502, -0.015310266986489296, -0.17403103411197662, 0.059933140873909, 0.07816384732723236, 0.06095883250236511, -0.20057669281959534, -0.05221109837293625, 0.10042732954025269, -0.05636553466320038, 0.0078105139546096325, 0.11918320506811142, 0.03336498513817787, 0.019139952957630157, -0.055623654276132584, -0.08002399653196335, 0.007827246561646461, 0.10992427170276642, -0.09781109541654587, -0.04108983650803566 ]
6bc760639f406d52f58158cd1a65512baa136a15
# Dataset Card for Evaluation run of TeeZee/Buttocks-7B-v1.0 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [TeeZee/Buttocks-7B-v1.0](https://huggingface.co/TeeZee/Buttocks-7B-v1.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TeeZee__Buttocks-7B-v1.0", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T05:45:51.088274](https://huggingface.co/datasets/open-llm-leaderboard/details_TeeZee__Buttocks-7B-v1.0/blob/main/results_2024-01-25T05-45-51.088274.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.49958748050002727, "acc_stderr": 0.03449947558483939, "acc_norm": 0.5072913093747228, "acc_norm_stderr": 0.03532795103647748, "mc1": 0.2962056303549572, "mc1_stderr": 0.015983595101811392, "mc2": 0.4472415883922134, "mc2_stderr": 0.015128282783775687 }, "harness|arc:challenge|25": { "acc": 0.5213310580204779, "acc_stderr": 0.014598087973127106, "acc_norm": 0.5460750853242321, "acc_norm_stderr": 0.01454922110517187 }, "harness|hellaswag|10": { "acc": 0.578868751244772, "acc_stderr": 0.004927314729433553, "acc_norm": 0.7561242780322645, "acc_norm_stderr": 0.004285410130466104 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.48148148148148145, "acc_stderr": 0.043163785995113245, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.043163785995113245 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.4342105263157895, "acc_stderr": 0.040335656678483184, "acc_norm": 0.4342105263157895, "acc_norm_stderr": 0.040335656678483184 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5358490566037736, "acc_stderr": 0.030693675018458003, "acc_norm": 0.5358490566037736, "acc_norm_stderr": 0.030693675018458003 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5347222222222222, "acc_stderr": 0.04171115858181618, "acc_norm": 0.5347222222222222, "acc_norm_stderr": 0.04171115858181618 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, 
"acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4508670520231214, "acc_stderr": 0.03794012674697029, "acc_norm": 0.4508670520231214, "acc_norm_stderr": 0.03794012674697029 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3431372549019608, "acc_stderr": 0.04724007352383888, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.04724007352383888 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4595744680851064, "acc_stderr": 0.03257901482099835, "acc_norm": 0.4595744680851064, "acc_norm_stderr": 0.03257901482099835 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.35964912280701755, "acc_stderr": 0.04514496132873633, "acc_norm": 0.35964912280701755, "acc_norm_stderr": 0.04514496132873633 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.45517241379310347, "acc_stderr": 0.04149886942192117, "acc_norm": 0.45517241379310347, "acc_norm_stderr": 0.04149886942192117 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.36243386243386244, "acc_stderr": 0.024757473902752035, "acc_norm": 0.36243386243386244, "acc_norm_stderr": 0.024757473902752035 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2698412698412698, "acc_stderr": 0.03970158273235172, "acc_norm": 0.2698412698412698, "acc_norm_stderr": 0.03970158273235172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6064516129032258, "acc_stderr": 0.027791878753132267, "acc_norm": 0.6064516129032258, "acc_norm_stderr": 0.027791878753132267 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.39408866995073893, "acc_stderr": 0.034381579670365446, "acc_norm": 0.39408866995073893, "acc_norm_stderr": 0.034381579670365446 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6303030303030303, "acc_stderr": 0.03769430314512568, "acc_norm": 0.6303030303030303, "acc_norm_stderr": 0.03769430314512568 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6565656565656566, "acc_stderr": 0.03383201223244441, "acc_norm": 0.6565656565656566, "acc_norm_stderr": 0.03383201223244441 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.689119170984456, "acc_stderr": 0.03340361906276586, "acc_norm": 0.689119170984456, "acc_norm_stderr": 0.03340361906276586 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4846153846153846, "acc_stderr": 0.025339003010106522, "acc_norm": 0.4846153846153846, "acc_norm_stderr": 0.025339003010106522 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.28888888888888886, "acc_stderr": 0.027634907264178544, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.027634907264178544 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5378151260504201, "acc_stderr": 0.03238546948758979, "acc_norm": 0.5378151260504201, "acc_norm_stderr": 0.03238546948758979 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.6990825688073394, "acc_stderr": 0.019664751366802114, "acc_norm": 0.6990825688073394, "acc_norm_stderr": 0.019664751366802114 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4166666666666667, "acc_stderr": 
0.03362277436608043, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.03362277436608043 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.6127450980392157, "acc_stderr": 0.03418931233833342, "acc_norm": 0.6127450980392157, "acc_norm_stderr": 0.03418931233833342 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.6413502109704642, "acc_stderr": 0.031219569445301833, "acc_norm": 0.6413502109704642, "acc_norm_stderr": 0.031219569445301833 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5605381165919282, "acc_stderr": 0.03331092511038179, "acc_norm": 0.5605381165919282, "acc_norm_stderr": 0.03331092511038179 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5801526717557252, "acc_stderr": 0.04328577215262971, "acc_norm": 0.5801526717557252, "acc_norm_stderr": 0.04328577215262971 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6528925619834711, "acc_stderr": 0.04345724570292535, "acc_norm": 0.6528925619834711, "acc_norm_stderr": 0.04345724570292535 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5648148148148148, "acc_stderr": 0.04792898170907061, "acc_norm": 0.5648148148148148, "acc_norm_stderr": 0.04792898170907061 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5828220858895705, "acc_stderr": 0.03874102859818081, "acc_norm": 0.5828220858895705, "acc_norm_stderr": 0.03874102859818081 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4017857142857143, "acc_stderr": 0.04653333146973647, "acc_norm": 0.4017857142857143, "acc_norm_stderr": 0.04653333146973647 }, "harness|hendrycksTest-management|5": { "acc": 0.6796116504854369, "acc_stderr": 0.04620284082280042, "acc_norm": 0.6796116504854369, "acc_norm_stderr": 0.04620284082280042 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7649572649572649, "acc_stderr": 0.027778835904935437, "acc_norm": 0.7649572649572649, "acc_norm_stderr": 0.027778835904935437 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.6, "acc_stderr": 
0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6411238825031929, "acc_stderr": 0.017152991797501342, "acc_norm": 0.6411238825031929, "acc_norm_stderr": 0.017152991797501342 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5, "acc_stderr": 0.026919095102908273, "acc_norm": 0.5, "acc_norm_stderr": 0.026919095102908273 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23687150837988827, "acc_stderr": 0.014219570788103986, "acc_norm": 0.23687150837988827, "acc_norm_stderr": 0.014219570788103986 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5032679738562091, "acc_stderr": 0.028629305194003543, "acc_norm": 0.5032679738562091, "acc_norm_stderr": 0.028629305194003543 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5852090032154341, "acc_stderr": 0.027982680459759567, "acc_norm": 0.5852090032154341, "acc_norm_stderr": 0.027982680459759567 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5123456790123457, "acc_stderr": 0.027812262269327242, "acc_norm": 0.5123456790123457, "acc_norm_stderr": 0.027812262269327242 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.40070921985815605, "acc_stderr": 0.029233465745573086, "acc_norm": 0.40070921985815605, "acc_norm_stderr": 0.029233465745573086 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3852672750977836, "acc_stderr": 0.012429485434955182, "acc_norm": 0.3852672750977836, "acc_norm_stderr": 0.012429485434955182 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.47058823529411764, "acc_stderr": 0.030320243265004137, "acc_norm": 0.47058823529411764, "acc_norm_stderr": 0.030320243265004137 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4820261437908497, "acc_stderr": 0.020214761037872408, "acc_norm": 0.4820261437908497, "acc_norm_stderr": 0.020214761037872408 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 
0.045820048415054174, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.045820048415054174 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5346938775510204, "acc_stderr": 0.03193207024425314, "acc_norm": 0.5346938775510204, "acc_norm_stderr": 0.03193207024425314 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6318407960199005, "acc_stderr": 0.03410410565495302, "acc_norm": 0.6318407960199005, "acc_norm_stderr": 0.03410410565495302 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-virology|5": { "acc": 0.4578313253012048, "acc_stderr": 0.038786267710023595, "acc_norm": 0.4578313253012048, "acc_norm_stderr": 0.038786267710023595 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6374269005847953, "acc_stderr": 0.036871306155620606, "acc_norm": 0.6374269005847953, "acc_norm_stderr": 0.036871306155620606 }, "harness|truthfulqa:mc|0": { "mc1": 0.2962056303549572, "mc1_stderr": 0.015983595101811392, "mc2": 0.4472415883922134, "mc2_stderr": 0.015128282783775687 }, "harness|winogrande|5": { "acc": 0.6890292028413575, "acc_stderr": 0.013009534736286058 }, "harness|gsm8k|5": { "acc": 0.0576194086429113, "acc_stderr": 0.006418593319822861 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_TeeZee__Buttocks-7B-v1.0
[ "region:us" ]
2024-01-25T05:48:10+00:00
{"pretty_name": "Evaluation run of TeeZee/Buttocks-7B-v1.0", "dataset_summary": "Dataset automatically created during the evaluation run of model [TeeZee/Buttocks-7B-v1.0](https://huggingface.co/TeeZee/Buttocks-7B-v1.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TeeZee__Buttocks-7B-v1.0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T05:45:51.088274](https://huggingface.co/datasets/open-llm-leaderboard/details_TeeZee__Buttocks-7B-v1.0/blob/main/results_2024-01-25T05-45-51.088274.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.49958748050002727,\n \"acc_stderr\": 0.03449947558483939,\n \"acc_norm\": 0.5072913093747228,\n \"acc_norm_stderr\": 0.03532795103647748,\n \"mc1\": 0.2962056303549572,\n \"mc1_stderr\": 0.015983595101811392,\n \"mc2\": 0.4472415883922134,\n \"mc2_stderr\": 0.015128282783775687\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5213310580204779,\n \"acc_stderr\": 0.014598087973127106,\n \"acc_norm\": 0.5460750853242321,\n \"acc_norm_stderr\": 0.01454922110517187\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.578868751244772,\n \"acc_stderr\": 0.004927314729433553,\n \"acc_norm\": 0.7561242780322645,\n \"acc_norm_stderr\": 0.004285410130466104\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.043163785995113245,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.043163785995113245\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.4342105263157895,\n \"acc_stderr\": 0.040335656678483184,\n \"acc_norm\": 0.4342105263157895,\n \"acc_norm_stderr\": 0.040335656678483184\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5358490566037736,\n \"acc_stderr\": 0.030693675018458003,\n \"acc_norm\": 0.5358490566037736,\n \"acc_norm_stderr\": 0.030693675018458003\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5347222222222222,\n \"acc_stderr\": 0.04171115858181618,\n \"acc_norm\": 0.5347222222222222,\n \"acc_norm_stderr\": 0.04171115858181618\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.38,\n 
\"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4508670520231214,\n \"acc_stderr\": 0.03794012674697029,\n \"acc_norm\": 0.4508670520231214,\n \"acc_norm_stderr\": 0.03794012674697029\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3431372549019608,\n \"acc_stderr\": 0.04724007352383888,\n \"acc_norm\": 0.3431372549019608,\n \"acc_norm_stderr\": 0.04724007352383888\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4595744680851064,\n \"acc_stderr\": 0.03257901482099835,\n \"acc_norm\": 0.4595744680851064,\n \"acc_norm_stderr\": 0.03257901482099835\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.35964912280701755,\n \"acc_stderr\": 0.04514496132873633,\n \"acc_norm\": 0.35964912280701755,\n \"acc_norm_stderr\": 0.04514496132873633\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.45517241379310347,\n \"acc_stderr\": 0.04149886942192117,\n \"acc_norm\": 0.45517241379310347,\n \"acc_norm_stderr\": 0.04149886942192117\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.36243386243386244,\n \"acc_stderr\": 0.024757473902752035,\n \"acc_norm\": 0.36243386243386244,\n \"acc_norm_stderr\": 0.024757473902752035\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2698412698412698,\n \"acc_stderr\": 0.03970158273235172,\n 
\"acc_norm\": 0.2698412698412698,\n \"acc_norm_stderr\": 0.03970158273235172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6064516129032258,\n \"acc_stderr\": 0.027791878753132267,\n \"acc_norm\": 0.6064516129032258,\n \"acc_norm_stderr\": 0.027791878753132267\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.39408866995073893,\n \"acc_stderr\": 0.034381579670365446,\n \"acc_norm\": 0.39408866995073893,\n \"acc_norm_stderr\": 0.034381579670365446\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6303030303030303,\n \"acc_stderr\": 0.03769430314512568,\n \"acc_norm\": 0.6303030303030303,\n \"acc_norm_stderr\": 0.03769430314512568\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.6565656565656566,\n \"acc_stderr\": 0.03383201223244441,\n \"acc_norm\": 0.6565656565656566,\n \"acc_norm_stderr\": 0.03383201223244441\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.689119170984456,\n \"acc_stderr\": 0.03340361906276586,\n \"acc_norm\": 0.689119170984456,\n \"acc_norm_stderr\": 0.03340361906276586\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.4846153846153846,\n \"acc_stderr\": 0.025339003010106522,\n \"acc_norm\": 0.4846153846153846,\n \"acc_norm_stderr\": 0.025339003010106522\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.28888888888888886,\n \"acc_stderr\": 0.027634907264178544,\n \"acc_norm\": 0.28888888888888886,\n \"acc_norm_stderr\": 0.027634907264178544\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5378151260504201,\n \"acc_stderr\": 0.03238546948758979,\n \"acc_norm\": 0.5378151260504201,\n \"acc_norm_stderr\": 0.03238546948758979\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.6990825688073394,\n \"acc_stderr\": 0.019664751366802114,\n \"acc_norm\": 0.6990825688073394,\n \"acc_norm_stderr\": 0.019664751366802114\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4166666666666667,\n \"acc_stderr\": 0.03362277436608043,\n \"acc_norm\": 0.4166666666666667,\n \"acc_norm_stderr\": 0.03362277436608043\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.6127450980392157,\n \"acc_stderr\": 0.03418931233833342,\n \"acc_norm\": 0.6127450980392157,\n \"acc_norm_stderr\": 0.03418931233833342\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.6413502109704642,\n \"acc_stderr\": 0.031219569445301833,\n \"acc_norm\": 0.6413502109704642,\n \"acc_norm_stderr\": 0.031219569445301833\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5605381165919282,\n \"acc_stderr\": 0.03331092511038179,\n \"acc_norm\": 0.5605381165919282,\n \"acc_norm_stderr\": 0.03331092511038179\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.5801526717557252,\n \"acc_stderr\": 0.04328577215262971,\n \"acc_norm\": 0.5801526717557252,\n \"acc_norm_stderr\": 0.04328577215262971\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6528925619834711,\n \"acc_stderr\": 0.04345724570292535,\n \"acc_norm\": 0.6528925619834711,\n \"acc_norm_stderr\": 0.04345724570292535\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5648148148148148,\n \"acc_stderr\": 0.04792898170907061,\n 
\"acc_norm\": 0.5648148148148148,\n \"acc_norm_stderr\": 0.04792898170907061\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.5828220858895705,\n \"acc_stderr\": 0.03874102859818081,\n \"acc_norm\": 0.5828220858895705,\n \"acc_norm_stderr\": 0.03874102859818081\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4017857142857143,\n \"acc_stderr\": 0.04653333146973647,\n \"acc_norm\": 0.4017857142857143,\n \"acc_norm_stderr\": 0.04653333146973647\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6796116504854369,\n \"acc_stderr\": 0.04620284082280042,\n \"acc_norm\": 0.6796116504854369,\n \"acc_norm_stderr\": 0.04620284082280042\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7649572649572649,\n \"acc_stderr\": 0.027778835904935437,\n \"acc_norm\": 0.7649572649572649,\n \"acc_norm_stderr\": 0.027778835904935437\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6411238825031929,\n \"acc_stderr\": 0.017152991797501342,\n \"acc_norm\": 0.6411238825031929,\n \"acc_norm_stderr\": 0.017152991797501342\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.026919095102908273,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.026919095102908273\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23687150837988827,\n \"acc_stderr\": 0.014219570788103986,\n \"acc_norm\": 0.23687150837988827,\n \"acc_norm_stderr\": 0.014219570788103986\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5032679738562091,\n \"acc_stderr\": 0.028629305194003543,\n \"acc_norm\": 0.5032679738562091,\n \"acc_norm_stderr\": 0.028629305194003543\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5852090032154341,\n \"acc_stderr\": 0.027982680459759567,\n \"acc_norm\": 0.5852090032154341,\n 
\"acc_norm_stderr\": 0.027982680459759567\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5123456790123457,\n \"acc_stderr\": 0.027812262269327242,\n \"acc_norm\": 0.5123456790123457,\n \"acc_norm_stderr\": 0.027812262269327242\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.40070921985815605,\n \"acc_stderr\": 0.029233465745573086,\n \"acc_norm\": 0.40070921985815605,\n \"acc_norm_stderr\": 0.029233465745573086\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3852672750977836,\n \"acc_stderr\": 0.012429485434955182,\n \"acc_norm\": 0.3852672750977836,\n \"acc_norm_stderr\": 0.012429485434955182\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.47058823529411764,\n \"acc_stderr\": 0.030320243265004137,\n \"acc_norm\": 0.47058823529411764,\n \"acc_norm_stderr\": 0.030320243265004137\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.4820261437908497,\n \"acc_stderr\": 0.020214761037872408,\n \"acc_norm\": 0.4820261437908497,\n \"acc_norm_stderr\": 0.020214761037872408\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.5346938775510204,\n \"acc_stderr\": 0.03193207024425314,\n \"acc_norm\": 0.5346938775510204,\n \"acc_norm_stderr\": 0.03193207024425314\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6318407960199005,\n \"acc_stderr\": 0.03410410565495302,\n \"acc_norm\": 0.6318407960199005,\n \"acc_norm_stderr\": 0.03410410565495302\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4578313253012048,\n \"acc_stderr\": 0.038786267710023595,\n 
\"acc_norm\": 0.4578313253012048,\n \"acc_norm_stderr\": 0.038786267710023595\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.6374269005847953,\n \"acc_stderr\": 0.036871306155620606,\n \"acc_norm\": 0.6374269005847953,\n \"acc_norm_stderr\": 0.036871306155620606\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2962056303549572,\n \"mc1_stderr\": 0.015983595101811392,\n \"mc2\": 0.4472415883922134,\n \"mc2_stderr\": 0.015128282783775687\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6890292028413575,\n \"acc_stderr\": 0.013009534736286058\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0576194086429113,\n \"acc_stderr\": 0.006418593319822861\n }\n}\n```", "repo_url": "https://huggingface.co/TeeZee/Buttocks-7B-v1.0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|arc:challenge|25_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|gsm8k|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hellaswag|10_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-45-51.088274.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-45-51.088274.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-45-51.088274.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-45-51.088274.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-45-51.088274.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-45-51.088274.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T05-45-51.088274.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T05-45-51.088274.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["**/details_harness|winogrande|5_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T05-45-51.088274.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T05_45_51.088274", "path": ["results_2024-01-25T05-45-51.088274.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T05-45-51.088274.parquet"]}]}]}
2024-01-25T05:48:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of TeeZee/Buttocks-7B-v1.0 Dataset automatically created during the evaluation run of model TeeZee/Buttocks-7B-v1.0 on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T05:45:51.088274(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of TeeZee/Buttocks-7B-v1.0\n\n\n\nDataset automatically created during the evaluation run of model TeeZee/Buttocks-7B-v1.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T05:45:51.088274(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of TeeZee/Buttocks-7B-v1.0\n\n\n\nDataset automatically created during the evaluation run of model TeeZee/Buttocks-7B-v1.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T05:45:51.088274(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 185, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of TeeZee/Buttocks-7B-v1.0\n\n\n\nDataset automatically created during the evaluation run of model TeeZee/Buttocks-7B-v1.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T05:45:51.088274(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.04603687301278114, 0.1978493481874466, -0.005060289520770311, 0.0432012565433979, 0.07283857464790344, -0.011243822984397411, 0.04808541014790535, 0.10223376750946045, 0.032981984317302704, 0.18398018181324005, -0.024128546938300133, 0.11277471482753754, 0.06847305595874786, 0.129176065325737, 0.029471853747963905, -0.12758219242095947, 0.02303493022918701, -0.08252373337745667, 0.11021143943071365, 0.08249641954898834, 0.059859294444322586, -0.07431276887655258, 0.06824585795402527, -0.021935803815722466, 0.03377721831202507, -0.011038832366466522, -0.07743184268474579, -0.02942664735019207, 0.10001160949468613, 0.10938194394111633, 0.03421428054571152, -0.017649924382567406, 0.02161601185798645, -0.27025604248046875, 0.014994136057794094, 0.09226346760988235, -0.009648574516177177, 0.033527910709381104, 0.13218684494495392, -0.08009731769561768, 0.08598747849464417, -0.019488492980599403, 0.08335357159376144, 0.058074984699487686, -0.11000633984804153, -0.14646264910697937, -0.1522434949874878, 0.005983008537441492, 0.05567970126867294, 0.042027708142995834, -0.027502641081809998, 0.16479064524173737, -0.07466761767864227, 0.049905095249414444, 0.14338049292564392, -0.10172571241855621, -0.017665309831500053, 0.04289401322603226, 0.01951693743467331, 0.07325035333633423, -0.08404286950826645, -0.02714044228196144, 0.036768071353435516, 0.05908888205885887, -0.006151088513433933, 0.014783836901187897, -0.017833059653639793, 0.006974342279136181, -0.13784009218215942, -0.13160531222820282, 0.1529054492712021, 0.009665566496551037, -0.05568674951791763, -0.17933456599712372, -0.013676194474101067, 0.017063362523913383, -0.0009498029248788953, 0.0011931732296943665, -0.0058178165927529335, -0.018329275771975517, 0.09892848134040833, -0.003199306782335043, -0.10290206968784332, -0.028106125071644783, -0.0032433033920824528, 0.07427305728197098, 0.02469763532280922, -0.010148361325263977, 0.0068414174020290375, 0.12057780474424362, 0.01754995621740818, 
-0.06136395409703255, -0.07553667575120926, -0.05843565985560417, -0.12178054451942444, -0.04378793016076088, 0.020701831206679344, -0.05922101438045502, 0.034008558839559555, 0.24103839695453644, 0.004072934854775667, 0.028773413971066475, -0.12353592365980148, 0.009391522966325283, 0.12239743769168854, 0.05128484219312668, -0.07710561901330948, -0.05563492700457573, -0.03568525239825249, 0.01982349343597889, 0.03580823913216591, -0.013743704184889793, 0.013285640627145767, 0.061043526977300644, 0.02811124548316002, 0.12364722043275833, 0.12355044484138489, 0.030339032411575317, -0.0778428167104721, -0.01890384592115879, 0.2510947287082672, -0.13695591688156128, -0.020128654316067696, 0.0183644387871027, -0.0361420176923275, -0.1193603053689003, 0.06535293161869049, -0.005767937283962965, -0.05170299485325813, 0.12020112574100494, -0.04595256596803665, -0.08349886536598206, -0.06963872164487839, -0.06664639711380005, 0.05607914924621582, 0.013689675368368626, -0.04660048335790634, -0.06565666198730469, -0.10563529282808304, -0.08109691739082336, 0.027237780392169952, -0.06500571221113205, -0.024205993860960007, 0.025479421019554138, -0.003976988606154919, -0.01070434134453535, -0.012170433066785336, 0.1218080222606659, -0.06348215788602829, 0.037876661866903305, -0.013598205521702766, 0.01829918660223484, 0.09556775540113449, 0.03689826652407646, -0.1174284964799881, 0.07616668194532394, -0.12595447897911072, 0.09932693839073181, -0.11625202000141144, -0.0238935723900795, -0.11858048290014267, -0.004034457262605429, -0.028852110728621483, 0.042848631739616394, -0.03872166946530342, 0.0835522785782814, -0.22029845416545868, 0.00008469469321426004, 0.15524190664291382, -0.12289299815893173, -0.07711786031723022, 0.09593154489994049, -0.0444185733795166, 0.07032889127731323, 0.046802159398794174, 0.10154978185892105, 0.11056084930896759, -0.08711772412061691, -0.08939425647258759, -0.06054127961397171, -0.031023023650050163, 0.16383089125156403, 0.06307549774646759, 
-0.08361304551362991, 0.10049039870500565, 0.05051541328430176, -0.010260402224957943, -0.07005815207958221, -0.0019046033266931772, -0.06446018815040588, -0.023097112774848938, -0.07024289667606354, -0.049535371363162994, -0.004007951822131872, -0.074636310338974, -0.024099308997392654, -0.0824076309800148, -0.012166720815002918, 0.10315138101577759, -0.023621877655386925, 0.01207626610994339, -0.07223120331764221, 0.022566435858607292, 0.010400744155049324, 0.010384637862443924, -0.21820008754730225, -0.08875101804733276, 0.03365349769592285, -0.21154464781284332, 0.057009998708963394, 0.044763922691345215, 0.008479258976876736, 0.05029131472110748, -0.006248606834560633, 0.03405606746673584, 0.01894337311387062, -0.010997854173183441, -0.01292966865003109, -0.14422687888145447, -0.06072587892413139, -0.08564647287130356, 0.09006801247596741, -0.13626624643802643, -0.012881351634860039, 0.06046394258737564, 0.14837343990802765, 0.02373444475233555, -0.07943836599588394, 0.05799209699034691, 0.00965819787234068, -0.04585680365562439, -0.05179829150438309, -0.0071824342012405396, -0.029714390635490417, 0.04447101429104805, 0.031366247683763504, -0.19511881470680237, -0.10466919094324112, 0.06845357269048691, 0.1376051902770996, -0.07299619168043137, -0.07924056798219681, -0.062285080552101135, -0.06351785361766815, -0.08880872279405594, -0.07345395535230637, 0.06063932180404663, 0.08941402286291122, 0.04110337048768997, -0.07218300551176071, -0.056125350296497345, 0.010249458253383636, 0.055862944573163986, -0.060759592801332474, 0.11187178641557693, 0.07275526970624924, -0.07960730046033859, 0.10642745345830917, -0.047670017927885056, 0.11376301199197769, 0.08476662635803223, 0.03362436220049858, -0.10182517766952515, 0.00443029310554266, 0.06062775477766991, 0.04412868246436119, 0.0756584033370018, -0.06179419904947281, 0.036926545202732086, 0.08598869293928146, -0.01210852712392807, 0.034079551696777344, -0.06625601649284363, 0.026534870266914368, 
0.034165047109127045, 0.003927980549633503, 0.013495104387402534, 0.014886375516653061, 0.020143941044807434, 0.08341877907514572, 0.019797250628471375, 0.10481198132038116, -0.025397183373570442, -0.051277413964271545, -0.10493118315935135, 0.1423252820968628, -0.0823746770620346, -0.27656328678131104, -0.1635429561138153, -0.04386639967560768, -0.031379323452711105, -0.01160147413611412, 0.06623139977455139, -0.005820700898766518, -0.1092015877366066, -0.114385686814785, 0.05755474045872688, 0.041894327849149704, -0.13662433624267578, -0.05424634739756584, 0.055049218237400055, -0.01202710997313261, -0.17414356768131256, 0.043028730899095535, 0.04766588285565376, -0.06050964817404747, -0.002437739633023739, 0.06744606792926788, 0.10892637073993683, 0.09095142036676407, 0.08336903154850006, -0.028724107891321182, -0.009891199879348278, 0.17021960020065308, -0.11084870249032974, 0.0339750200510025, 0.10827373713254929, -0.0497540645301342, 0.06289827078580856, 0.15952926874160767, 0.011985604651272297, -0.0910055935382843, 0.060360342264175415, 0.09834099560976028, -0.07141459733247757, -0.2446325570344925, -0.1265663355588913, -0.03535666689276695, 0.02201484888792038, 0.1156826838850975, 0.06501835584640503, 0.03279756382107735, 0.011913137510418892, -0.13115641474723816, -0.01861437037587166, -0.047789424657821655, 0.07033411413431168, 0.05819219723343849, -0.0037543915677815676, 0.04042118415236473, -0.047170452773571014, 0.020148809999227524, 0.12058550119400024, 0.050547778606414795, 0.14177362620830536, -0.040413741022348404, 0.19090484082698822, 0.09406198561191559, 0.08190113306045532, -0.04234372079372406, 0.03967421501874924, -0.0079400185495615, 0.06203595921397209, -0.015211320482194424, -0.10700377076864243, -0.051280394196510315, 0.10235102474689484, 0.0358402244746685, -0.0713638886809349, 0.04036770761013031, -0.0953218936920166, 0.0329451858997345, 0.19469217956066132, -0.028505945578217506, -0.11918714642524719, -0.059589505195617676, 
0.06365849822759628, -0.038656849414110184, -0.09395141899585724, -0.011933737434446812, 0.082556813955307, -0.14999790489673615, 0.013123814947903156, -0.032898154109716415, 0.07484281063079834, -0.12714262306690216, -0.028737269341945648, -0.02996981330215931, 0.03454665467143059, -0.004987562075257301, 0.11479035764932632, -0.1299692988395691, 0.08918723464012146, -0.0036444184370338917, 0.020805219188332558, -0.11609143763780594, 0.05206041410565376, -0.03600694239139557, -0.06537790596485138, 0.13101617991924286, -0.01338331401348114, -0.06922407448291779, -0.042589038610458374, -0.10352545231580734, -0.008156918920576572, 0.04809338599443436, -0.10070407390594482, 0.10296965390443802, 0.028729554265737534, -0.023717304691672325, -0.026630865409970284, -0.013108424842357635, -0.12204703688621521, -0.24314704537391663, 0.107977956533432, -0.12962792813777924, 0.024936361238360405, -0.06732518970966339, -0.05185646936297417, -0.033837441354990005, 0.14351297914981842, -0.09067930281162262, -0.052304577082395554, -0.10482339560985565, -0.01629173383116722, 0.18425992131233215, -0.04651147872209549, 0.060678210109472275, -0.04209526628255844, 0.1911134570837021, -0.024288911372423172, -0.041486695408821106, -0.007637448608875275, -0.0903981626033783, -0.195744588971138, -0.051129233092069626, 0.11571800708770752, 0.0828741192817688, 0.018143653869628906, -0.005224612075835466, 0.010042213834822178, 0.018157554790377617, -0.10094761848449707, 0.0196516253054142, 0.10877470672130585, 0.11916831880807877, 0.044939782470464706, -0.0240507572889328, -0.13055898249149323, -0.10042601823806763, -0.10801582038402557, 0.04548337683081627, 0.17991772294044495, -0.06204909086227417, 0.17529790103435516, 0.14767126739025116, -0.09075965732336044, -0.19026634097099304, -0.05698980391025543, 0.022054877132177353, -0.02736755833029747, 0.1349795162677765, -0.19183866679668427, 0.06780364364385605, 0.07308094203472137, -0.02806580439209938, 0.11970169842243195, 
-0.27211737632751465, -0.1424105018377304, 0.040725745260715485, 0.04522208496928215, -0.23559074103832245, -0.17461565136909485, -0.09995529055595398, -0.027670420706272125, -0.1850111037492752, 0.14773176610469818, 0.008566418662667274, 0.02832573838531971, -0.024348685517907143, 0.08644047379493713, 0.05071514844894409, -0.07036367803812027, 0.12822924554347992, -0.012032093480229378, 0.024568485096096992, -0.10216245800256729, -0.04094979166984558, 0.005107541102916002, -0.039175380021333694, 0.07557576149702072, 0.021976467221975327, 0.0466916486620903, -0.0785943791270256, -0.03692539408802986, -0.07906749844551086, 0.05540001019835472, -0.07987778633832932, -0.05612575262784958, -0.0795503556728363, 0.08062900602817535, 0.08009817451238632, -0.011667889542877674, 0.025390271097421646, -0.04696144536137581, 0.047432124614715576, 0.2048679143190384, 0.09935145080089569, 0.04372291639447212, -0.09389781206846237, -0.039099279791116714, -0.015696154907345772, -0.00936131551861763, -0.1094156876206398, 0.04540049657225609, 0.07419972121715546, 0.0507136769592762, 0.08885660767555237, -0.025582604110240936, -0.1818222999572754, 0.004036580212414265, 0.06928430497646332, -0.08241116255521774, -0.18970783054828644, 0.04452540725469589, 0.14892563223838806, -0.15431690216064453, -0.07524190098047256, 0.07142463326454163, 0.020787369459867477, -0.04022253677248955, -0.004169660620391369, 0.07530681788921356, 0.05327622964978218, 0.10427016019821167, 0.010978898964822292, 0.046297118067741394, -0.07604482024908066, 0.08559568971395493, 0.13420598208904266, -0.10001328587532043, 0.007406014949083328, 0.027151094749569893, -0.051762066781520844, -0.068990059196949, -0.0005589494248852134, 0.003170931013301015, 0.02208864502608776, -0.029953381046652794, 0.029948776587843895, -0.028718404471874237, 0.06038734316825867, 0.13716307282447815, -0.0012492230162024498, 0.05170173570513725, 0.017653770744800568, 0.0016537923365831375, -0.06301172822713852, 0.09774524718523026, 
0.028264891356229782, 0.04860581085085869, -0.03432409092783928, 0.03172215819358826, 0.02122177742421627, -0.025380579754710197, 0.020261423662304878, -0.055017974227666855, -0.06594538688659668, 0.0037039262242615223, -0.1676253378391266, 0.05987079069018364, -0.08835328370332718, 0.007852558046579361, 0.0005406169220805168, -0.0256627406924963, -0.007437299471348524, 0.006699894554913044, -0.08029229938983917, -0.04527440667152405, -0.04328106716275215, 0.13811515271663666, -0.19407406449317932, 0.001222905470058322, 0.09497340023517609, -0.074517160654068, 0.062349386513233185, -0.009515095502138138, -0.022959956899285316, 0.033160749822854996, -0.12247584015130997, 0.00005431221870821901, -0.02233639918267727, 0.06399862468242645, 0.013337849639356136, -0.13032962381839752, -0.018525194376707077, 0.004486045800149441, -0.07425356656312943, -0.012066756375133991, 0.03612188994884491, -0.15619072318077087, 0.060224369168281555, 0.08429194241762161, -0.05587435141205788, -0.04256488010287285, 0.04860958829522133, 0.05205446109175682, -0.010952943935990334, 0.10139515995979309, -0.0007232072530314326, 0.022860031574964523, -0.15590538084506989, -0.04668122157454491, 0.0005263580242171884, 0.014037083834409714, 0.04856029525399208, 0.022214744240045547, 0.018360255286097527, 0.013864614069461823, 0.23822875320911407, -0.02003454975783825, 0.03507952392101288, 0.019828421995043755, -0.007133609149605036, -0.02975248359143734, 0.025556281208992004, 0.021434294059872627, -0.011695696040987968, 0.029199155047535896, 0.01879480481147766, -0.03913876786828041, -0.06021249666810036, -0.030329162254929543, 0.07437293976545334, 0.13043645024299622, 0.140343576669693, -0.04169592633843422, 0.06441084295511246, -0.16495905816555023, -0.061253707855939865, 0.010382588021457195, -0.04362711310386658, 0.049610115587711334, -0.07518059015274048, 0.0665769875049591, 0.0830700471997261, -0.09640619158744812, 0.14989642798900604, -0.05603378266096115, -0.025246955454349518, 
-0.031131930649280548, -0.16590797901153564, -0.03666713088750839, 0.03557181358337402, 0.003164849942550063, -0.088364377617836, 0.12188812345266342, 0.1341545581817627, -0.010124490596354008, -0.0005572681548073888, 0.08242713660001755, -0.06421547383069992, -0.057616278529167175, -0.03930884599685669, 0.0019371270900592208, 0.00665059732273221, -0.0016262257704511285, 0.07726594060659409, 0.01350259967148304, 0.06191476434469223, 0.07264713943004608, 0.1019124910235405, 0.025927679613232613, 0.005709200166165829, -0.045175112783908844, -0.052046481519937515, 0.0018838493851944804, -0.023774847388267517, -0.05431733652949333, 0.2106788456439972, 0.0517471544444561, 0.01463325135409832, 0.01776258274912834, 0.21850185096263885, -0.002608930692076683, -0.07531321048736572, -0.13190332055091858, 0.13270048797130585, 0.0005169218638911843, 0.016964107751846313, 0.03517836704850197, -0.11330289393663406, 0.03253190591931343, 0.15552863478660583, 0.09833817183971405, 0.05804729834198952, 0.013377822004258633, 0.042728349566459656, 0.02111535146832466, -0.03612831234931946, 0.0620073527097702, 0.02291879802942276, 0.24138590693473816, -0.05775957182049751, 0.09171672910451889, -0.004080329556018114, 0.00014623426250182092, -0.02240518480539322, 0.10405489057302475, -0.052571989595890045, 0.012937837280333042, -0.06968364119529724, 0.09191019088029861, -0.05749177187681198, -0.261745423078537, -0.002529522869735956, -0.0728863924741745, -0.13917045295238495, -0.008110824041068554, 0.024378564208745956, -0.026226148009300232, 0.04734773188829422, 0.030131729319691658, -0.023324083536863327, 0.18613362312316895, -0.001734944642521441, -0.07761699706315994, -0.07526197284460068, 0.07087798416614532, -0.03665582835674286, 0.2948835790157318, -0.0032425508834421635, 0.06753511726856232, 0.08827298879623413, -0.01609724387526512, -0.1333094835281372, 0.0211113840341568, 0.08995392173528671, -0.04671704024076462, 0.05955806002020836, 0.16427947580814362, -0.02739267796278, 
0.14419540762901306, 0.0333239771425724, -0.02416541613638401, 0.07540689408779144, 0.07777557522058487, 0.04822385311126709, -0.1004035696387291, 0.07631585747003555, -0.09280572831630707, 0.13510450720787048, 0.0977218747138977, -0.01411380060017109, -0.009761439636349678, -0.06252986192703247, 0.06186762824654579, -0.03325096517801285, 0.14476262032985687, -0.020661670714616776, -0.16240932047367096, 0.04813945293426514, 0.021777929738163948, 0.061892662197351456, -0.23821012675762177, -0.057270318269729614, 0.10681229829788208, -0.0574263334274292, 0.014568104408681393, 0.0862826332449913, 0.04223747178912163, 0.028943117707967758, -0.05567856505513191, -0.12310920655727386, 0.012555485591292381, 0.12665240466594696, -0.0816195160150528, -0.040166135877370834 ]
c5a5e2bdb8de406c65803db5ef3296d9eae3407d
# 🚀 Load Dataset ```python from datasets import load_dataset dataset = load_dataset("shuyuej/prompt_consistency_training_fewer") dataset = dataset["train"] print(dataset) ```
shuyuej/prompt_consistency_training_fewer
[ "license:apache-2.0", "region:us" ]
2024-01-25T05:51:41+00:00
{"license": "apache-2.0"}
2024-01-25T06:27:22+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
# Load Dataset
[ "# Load Dataset" ]
[ "TAGS\n#license-apache-2.0 #region-us \n", "# Load Dataset" ]
[ 14, 5 ]
[ "passage: TAGS\n#license-apache-2.0 #region-us \n# Load Dataset" ]
[ -0.07001654803752899, 0.1904653012752533, -0.004998877178877592, 0.00907934457063675, -0.0028538380283862352, 0.054394908249378204, 0.16303427517414093, 0.12618236243724823, 0.1763383150100708, -0.06096614897251129, 0.09710494428873062, 0.04643046483397484, 0.020282577723264694, 0.11439700424671173, 0.015474379062652588, -0.10245008021593094, 0.11215199530124664, -0.03296778351068497, -0.15413860976696014, -0.020041679963469505, 0.06634549796581268, 0.008282235823571682, 0.018562356010079384, -0.06968339532613754, -0.006373110227286816, 0.03640283644199371, 0.0020335251465439796, 0.016162084415555, 0.02330676279962063, -0.038277264684438705, 0.005731794983148575, 0.029110433533787727, 0.03391251340508461, -0.20263391733169556, 0.002159419935196638, 0.011697918176651001, -0.072933629155159, 0.021473249420523643, 0.015562492422759533, 0.013006992638111115, -0.014430290088057518, 0.008538242429494858, -0.04590527340769768, 0.004893551114946604, -0.029969552531838417, -0.23031283915042877, -0.13999857008457184, 0.03098377026617527, 0.061859868466854095, 0.04261074587702751, 0.08520697802305222, 0.112308070063591, -0.1684703677892685, -0.023576728999614716, 0.05644429847598076, -0.2658948600292206, 0.03770916908979416, 0.1925373077392578, -0.027282025665044785, 0.05708051100373268, -0.032640136778354645, 0.014179641380906105, 0.09702048450708389, -0.03234678879380226, -0.05952118709683418, -0.019281448796391487, -0.14307193458080292, 0.12061256915330887, 0.007947882637381554, -0.07937075942754745, 0.4276890456676483, 0.08947485685348511, 0.03446684032678604, 0.04088602960109711, -0.04162471741437912, 0.08802156895399094, -0.00882687047123909, 0.11228219419717789, 0.10370917618274689, 0.17537449300289154, 0.09373819082975388, -0.05837281793355942, -0.1387629508972168, -0.0764801874756813, -0.14893755316734314, -0.0881175547838211, 0.002753246808424592, 0.14657826721668243, -0.11670559644699097, -0.011121216230094433, -0.013173341751098633, -0.08000656962394714, 
-0.016438452526926994, -0.06224498152732849, 0.05970108509063721, 0.07276752591133118, -0.06468930095434189, 0.08973073959350586, 0.19600972533226013, 0.24304203689098358, 0.10773353278636932, -0.0031716858502477407, -0.09783545881509781, 0.1311764270067215, 0.02450001984834671, 0.0035947742871940136, 0.031216908246278763, -0.03225255012512207, 0.1274290233850479, -0.11814776808023453, 0.10293827950954437, -0.03236187621951103, -0.12062004208564758, -0.015531999059021473, -0.12012672424316406, 0.0924612358212471, 0.12156467139720917, -0.07433512806892395, -0.02871553599834442, -0.0001638674148125574, 0.1914612352848053, -0.07343824207782745, 0.011609512381255627, 0.037060488015413284, -0.04014584422111511, 0.019367489963769913, 0.07047323882579803, 0.01618986763060093, 0.051165465265512466, -0.07353158295154572, -0.07518532872200012, -0.04754794016480446, 0.006732400972396135, 0.08390267193317413, 0.16801878809928894, -0.0787782073020935, 0.02774466574192047, -0.07314363867044449, -0.2640254497528076, 0.015318970195949078, 0.10718841850757599, 0.01326839905232191, -0.04644060134887695, 0.0937519520521164, 0.04369577765464783, 0.0219112578779459, -0.07621479034423828, 0.021636027842760086, -0.10924455523490906, 0.01635110005736351, -0.18735843896865845, -0.00034857707214541733, -0.21728886663913727, 0.029500821605324745, -0.14498181641101837, 0.015984103083610535, 0.008336689323186874, -0.036203473806381226, -0.16693279147148132, 0.18584993481636047, -0.12228277325630188, 0.07665608078241348, -0.022888079285621643, -0.027417439967393875, -0.05536381155252457, 0.10486848652362823, -0.1901201605796814, 0.013646816834807396, 0.11956170201301575, -0.1315540224313736, -0.1981583535671234, -0.008748158812522888, 0.032004132866859436, 0.03905763477087021, 0.019999176263809204, 0.2750741243362427, 0.03931259736418724, -0.039619818329811096, 0.05513544753193855, 0.21230784058570862, -0.037666887044906616, -0.30606216192245483, 0.1337161511182785, -0.14175526797771454, 
-0.09446083754301071, 0.035942576825618744, -0.05984149128198624, 0.10572918504476547, 0.05563924461603165, -0.09842493385076523, -0.05832784250378609, -0.11846277117729187, -0.05080516263842583, -0.007119585759937763, 0.011414838954806328, -0.003772859927266836, 0.06987990438938141, -0.03394223004579544, 0.12409792840480804, 0.0430963933467865, 0.06995843350887299, -0.0019742429722100496, 0.010701647028326988, -0.032948900014162064, 0.01685371808707714, -0.05274543538689613, -0.11824917048215866, 0.03222540020942688, -0.07840131968259811, 0.010248200036585331, 0.0754818469285965, 0.08918923884630203, -0.10875682532787323, 0.004986066371202469, 0.054788678884506226, 0.05776919424533844, 0.09128472954034805, 0.004639789462089539, -0.1553730070590973, 0.03658977895975113, -0.007291417568922043, 0.11663542687892914, 0.013290156610310078, -0.015077928081154823, 0.02553686872124672, 0.030704280361533165, -0.01582513377070427, 0.021987546235322952, 0.012808484956622124, -0.15357114374637604, 0.013035740703344345, -0.029535915702581406, 0.0515626035630703, 0.07516250759363174, -0.10649967193603516, 0.17749257385730743, 0.05246195197105408, 0.0937701091170311, 0.1691731959581375, -0.0031942089553922415, 0.15303511917591095, -0.07711337506771088, -0.03034058026969433, -0.08476773649454117, 0.019114429131150246, -0.03520386293530464, -0.19912157952785492, 0.022483263164758682, -0.006603246089071035, -0.03966274857521057, 0.011731458827853203, -0.05206333473324776, -0.05032000690698624, -0.01633274368941784, -0.012912747450172901, 0.22682836651802063, -0.0740542858839035, 0.16127049922943115, 0.3929736912250519, 0.009790927171707153, 0.03198212757706642, -0.15585008263587952, -0.08811907470226288, 0.025037283077836037, -0.02013486996293068, -0.07733090966939926, 0.1421229988336563, -0.08407855778932571, 0.07925056666135788, 0.11559281498193741, 0.07515023648738861, 0.048295337706804276, -0.09319842606782913, -0.08565586805343628, -0.016870850697159767, -0.06494379788637161, 
-0.0757361501455307, 0.006179455202072859, -0.09064626693725586, 0.038997307419776917, -0.009553880430758, -0.08711880445480347, 0.1416091024875641, -0.06720657646656036, -0.07858653366565704, 0.08501026779413223, -0.1773792803287506, -0.017761990427970886, -0.04987482354044914, -0.05524556711316109, -0.05987956374883652, -0.011223694309592247, 0.03956194967031479, -0.07345953583717346, -0.06481890380382538, -0.0044582197442650795, -0.11862372606992722, 0.05133015662431717, -0.002835777821019292, -0.001831702538765967, 0.07515180855989456, 0.020108293741941452, -0.13118132948875427, -0.020894730463624, 0.014332571998238564, -0.011530141346156597, 0.020547814667224884, -0.12734408676624298, 0.08069302886724472, 0.11161760985851288, 0.07985270023345947, 0.040471069514751434, -0.002052333438768983, 0.08670412749052048, -0.0023793831933289766, 0.0037481726612895727, 0.16268162429332733, 0.012608258984982967, 0.018607959151268005, 0.11706527322530746, 0.05305314064025879, -0.048632312566041946, 0.0070783342234790325, 0.014489368535578251, -0.11434327811002731, -0.3088737428188324, -0.126278817653656, -0.0808558538556099, 0.07748565077781677, 0.08287206292152405, 0.14348120987415314, 0.05805513635277748, 0.07937482744455338, -0.020483603700995445, 0.006435450632125139, 0.013887143693864346, -0.036056581884622574, 0.031847286969423294, -0.035910844802856445, -0.01373085007071495, -0.17227716743946075, 0.07147528976202011, 0.19122646749019623, 0.1603180468082428, 0.19476060569286346, 0.19143876433372498, 0.13832567632198334, 0.09698405116796494, 0.18953464925289154, -0.040424197912216187, 0.12604868412017822, 0.058183349668979645, 0.027232512831687927, -0.01953265815973282, -0.0491039864718914, -0.0196559876203537, 0.10182230174541473, 0.03333480656147003, -0.174533411860466, 0.03443571925163269, -0.16347607970237732, 0.07128030061721802, 0.12810580432415009, 0.09226731956005096, -0.07216334342956543, 0.11737333983182907, 0.12907880544662476, 0.11842317879199982, 
0.0372268371284008, 0.13275204598903656, -0.0738038718700409, -0.04381672292947769, 0.11593903601169586, 0.027494866400957108, 0.10757297277450562, 0.006952364929020405, -0.0514223575592041, -0.05423459783196449, -0.1831110715866089, 0.07742337882518768, 0.1742519736289978, -0.11897630244493484, 0.1527315080165863, 0.007360770832747221, -0.07842399924993515, -0.10153786838054657, -0.035853754729032516, 0.07475770264863968, 0.13980795443058014, 0.10237900912761688, 0.09461832791566849, -0.16316671669483185, 0.1071644052863121, -0.18991471827030182, 0.03761889785528183, -0.06512397527694702, -0.02938513644039631, -0.13786184787750244, -0.03136984631419182, 0.018966924399137497, 0.04204443097114563, 0.14526686072349548, -0.08443856239318848, -0.0934140607714653, -0.03135392814874649, 0.16512109339237213, -0.07707548886537552, -0.09012707322835922, 0.04150933399796486, -0.01470975112169981, 0.13258203864097595, -0.0013804734917357564, -0.03641519695520401, -0.0624312199652195, -0.13648180663585663, 0.12659449875354767, -0.006370837800204754, -0.01564682088792324, -0.04408372566103935, -0.053515125066041946, -0.07499849051237106, -0.22749395668506622, 0.09926195442676544, -0.11815635859966278, 0.029708122834563255, -0.05135143920779228, 0.08055438846349716, -0.04865441098809242, 0.011574150063097477, 0.013341099955141544, 0.00196447572670877, -0.04447980970144272, -0.12119755893945694, 0.07120175659656525, 0.05407170578837395, 0.0179436057806015, 0.07252787798643112, -0.05706556513905525, 0.04152284935116768, 0.139897882938385, -0.08164766430854797, 0.1449587196111679, 0.16808827221393585, -0.08507421612739563, 0.16733917593955994, 0.3047759532928467, -0.09884221851825714, -0.27654603123664856, -0.13852934539318085, -0.22699107229709625, -0.1497262865304947, 0.05740240216255188, -0.16743381321430206, 0.1785702109336853, 0.15324127674102783, -0.17003217339515686, 0.15567822754383087, -0.20013009011745453, -0.04761470854282379, 0.22139687836170197, -0.07744777947664261, 
0.3270156681537628, -0.15036125481128693, -0.06321949511766434, -0.1357784867286682, -0.14006933569908142, 0.1640874743461609, -0.2520224452018738, 0.009696963243186474, 0.03358783572912216, -0.07571630924940109, -0.053476277738809586, -0.06294088065624237, 0.20365726947784424, 0.08864487707614899, 0.048724330961704254, -0.07783883810043335, 0.0538349449634552, 0.17002438008785248, -0.08258774876594543, 0.11427507549524307, -0.1551506072282791, -0.007821562699973583, -0.11334814876317978, 0.049372438341379166, -0.007536802440881729, 0.07303659617900848, 0.018435997888445854, -0.055110782384872437, -0.09097205847501755, -0.012564500793814659, -0.0010017354506999254, 0.017773348838090897, 0.2545160949230194, 0.12870335578918457, -0.09068934619426727, 0.12058752775192261, -0.08183950185775757, -0.10650019347667694, -0.09857156872749329, -0.09697787463665009, -0.09271606057882309, 0.05486408993601799, -0.29307621717453003, 0.06150501221418381, 0.04435229301452637, -0.056741055101156235, 0.021166298538446426, 0.04856487363576889, -0.07821919023990631, -0.047246869653463364, 0.10806192457675934, -0.05617867782711983, 0.0060365828685462475, 0.06188586354255676, 0.06320629268884659, 0.01908440701663494, 0.015047809109091759, 0.07980872690677643, 0.02779097482562065, 0.03299710527062416, 0.02156687341630459, 0.1232525184750557, -0.1096741333603859, 0.025780048221349716, 0.08234716206789017, -0.03277469798922539, -0.1291341930627823, 0.27868539094924927, 0.0328671969473362, -0.07490034401416779, -0.014853513799607754, 0.02017960511147976, -0.08733731508255005, -0.11030066758394241, 0.03357265517115593, 0.05562034994363785, -0.0790076032280922, -0.16002187132835388, 0.04434940963983536, -0.04750889167189598, -0.011495170183479786, -0.09123263508081436, 0.12545834481716156, 0.11386078596115112, 0.07958052307367325, -0.08150102943181992, 0.09422098100185394, -0.015888547524809837, -0.11720315366983414, -0.00965417642146349, -0.04330809786915779, -0.27147001028060913, 
0.0114149060100317, 0.07767054438591003, -0.01839832030236721, -0.024710092693567276, -0.05538126826286316, 0.068058542907238, -0.18358135223388672, 0.023774465546011925, -0.05291692540049553, 0.013881206512451172, 0.0013482654467225075, -0.06351982802152634, -0.013056534342467785, 0.018374785780906677, -0.11782747507095337, -0.05090685561299324, -0.03275580331683159, 0.07783249765634537, -0.16030682623386383, -0.0877394899725914, 0.11273340880870819, 0.03247608616948128, 0.1116083636879921, 0.11100348085165024, 0.0029238115530461073, 0.09652310609817505, -0.08867620676755905, -0.10137758404016495, 0.028416428714990616, 0.05850553140044212, -0.004717225208878517, 0.0338771790266037, -0.08107476681470871, 0.0965980663895607, -0.08487499505281448, 0.0017951868940144777, -0.03430997580289841, -0.11891184002161026, -0.1062634065747261, -0.07948566228151321, -0.1201784610748291, 0.039140596985816956, -0.16233326494693756, 0.17395327985286713, 0.09610553085803986, 0.10544848442077637, 0.07115017622709274, -0.017539754509925842, -0.051834944635629654, 0.0012812841450795531, -0.03794896602630615, -0.046503376215696335, -0.12840472161769867, 0.03780418261885643, -0.07327639311552048, -0.09395363926887512, 0.3460042476654053, -0.03070560283958912, -0.12798358500003815, 0.048227906227111816, 0.14436577260494232, 0.05600818246603012, -0.00208035996183753, 0.2746630012989044, 0.046842265874147415, 0.03356502205133438, -0.05823233723640442, 0.006521868985146284, 0.05556127429008484, -0.07734274864196777, 0.005020815413445234, 0.05489637702703476, 0.12683485448360443, 0.04395321011543274, 0.04137979447841644, -0.1202191486954689, -0.02280261740088463, 0.010061034001410007, 0.08806835114955902, 0.06402159482240677, 0.03048405982553959, 0.09209851920604706, 0.11836949735879898, -0.03152010589838028, -0.016634559258818626, -0.03362197056412697, 0.015894491225481033, -0.16492749750614166, -0.13232536613941193, -0.02111920900642872, -0.1728866845369339, -0.0008740238845348358, 
-0.003787984373047948, -0.04413442686200142, 0.26616278290748596, 0.05202426016330719, -0.014622442424297333, -0.0725850760936737, -0.14192940294742584, 0.008088779635727406, -0.0745602697134018, -0.025967802852392197, -0.0486544668674469, 0.029924146831035614, -0.07588427513837814, 0.02722756192088127, -0.05634620040655136, -0.06126495078206062, 0.059751976281404495, 0.09817507117986679, 0.1141197606921196, -0.06400559097528458, -0.037795860320329666, -0.12780417501926422, 0.005348340142518282, -0.031182952225208282, 0.19150452315807343, 0.0732182040810585, 0.07031357288360596, 0.11209698021411896, 0.07329583913087845, -0.047418445348739624, -0.11615771055221558, -0.050271112471818924, -0.035710208117961884, -0.03748736158013344, 0.04092748463153839, -0.03058774583041668, -0.03980829566717148, -0.04853656142950058, 0.19831372797489166, 0.26129576563835144, -0.08521967381238937, -0.0005671381950378418, -0.0002930442860815674, 0.003739734645932913, 0.00204873806796968, 0.1472928375005722, 0.05853525921702385, 0.11207357794046402, -0.051810771226882935, -0.0021520762238651514, -0.04380796104669571, -0.02876044623553753, -0.16162092983722687, 0.07960914820432663, -0.03894373029470444, -0.10289120674133301, -0.030670279636979103, 0.14215749502182007, -0.06743727624416351, 0.07221474498510361, 0.06153428182005882, -0.055546604096889496, -0.0187344029545784, -0.01849968172609806, 0.15872111916542053, 0.0667259618639946, 0.006397204007953405, -0.11801804602146149, 0.009210037998855114, 0.04581350460648537, -0.041233129799366, -0.30970680713653564, -0.18251237273216248, 0.08559861034154892, 0.09826260805130005, 0.2934248149394989, 0.01480457279831171, 0.1105048730969429, 0.012399069964885712, 0.023192690685391426, -0.17247705161571503, 0.06305760145187378, 0.02640485018491745, -0.04569758474826813, -0.06197669357061386, -0.21027415990829468, -0.15482737123966217, -0.012660115025937557, 0.07564311474561691, 0.07467310130596161, -0.013488510623574257, 0.1677602231502533, 
-0.04988284036517143, -0.08345402032136917, -0.02381213940680027, -0.11725359410047531, 0.09443474560976028, -0.06419520080089569, -0.07034330070018768, -0.07380110025405884, -0.04098692163825035, 0.006217553745955229, 0.03104177676141262, -0.26473453640937805, -0.035998400300741196, 0.14492513239383698, -0.0039027442689985037, 0.11870933324098587, 0.06679368764162064, 0.06207719072699547, 0.021644730120897293, -0.05295679718255997, -0.00618883827701211, -0.061871565878391266, 0.044328924268484116, 0.07077302038669586, -0.0393492691218853, 0.0051862443797290325, -0.09827139228582382, 0.04674050584435463, -0.040249425917863846, -0.05528632551431656, -0.1310999095439911 ]
04a7e97622c5a623f86cb8a5b87acfb5f790e46a
# Dataset Card for Evaluation run of nbeerbower/bruphin-epsilon <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [nbeerbower/bruphin-epsilon](https://huggingface.co/nbeerbower/bruphin-epsilon) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_nbeerbower__bruphin-epsilon", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T05:49:20.264803](https://huggingface.co/datasets/open-llm-leaderboard/details_nbeerbower__bruphin-epsilon/blob/main/results_2024-01-25T05-49-20.264803.json)(note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6562434752388866, "acc_stderr": 0.03198900028362337, "acc_norm": 0.6555271311464355, "acc_norm_stderr": 0.0326584820786784, "mc1": 0.5275397796817626, "mc1_stderr": 0.017476930190712187, "mc2": 0.669482738361527, "mc2_stderr": 0.01527115945822096 }, "harness|arc:challenge|25": { "acc": 0.6996587030716723, "acc_stderr": 0.013395909309957004, "acc_norm": 0.7209897610921502, "acc_norm_stderr": 0.013106784883601327 }, "harness|hellaswag|10": { "acc": 0.7137024497112129, "acc_stderr": 0.0045110633512787015, "acc_norm": 0.8809002190798646, "acc_norm_stderr": 0.003232439139881554 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6592592592592592, "acc_stderr": 0.040943762699967926, "acc_norm": 0.6592592592592592, "acc_norm_stderr": 0.040943762699967926 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7132075471698113, "acc_stderr": 0.027834912527544064, "acc_norm": 0.7132075471698113, "acc_norm_stderr": 0.027834912527544064 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, 
"acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6647398843930635, "acc_stderr": 0.03599586301247077, "acc_norm": 0.6647398843930635, "acc_norm_stderr": 0.03599586301247077 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.43137254901960786, "acc_stderr": 0.04928099597287534, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.04928099597287534 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.04292346959909283, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5787234042553191, "acc_stderr": 0.03227834510146267, "acc_norm": 0.5787234042553191, "acc_norm_stderr": 0.03227834510146267 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5087719298245614, "acc_stderr": 0.04702880432049615, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5517241379310345, "acc_stderr": 0.04144311810878152, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.04144311810878152 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42857142857142855, "acc_stderr": 0.02548718714785938, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.02548718714785938 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677172, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7903225806451613, "acc_stderr": 0.023157879349083522, "acc_norm": 0.7903225806451613, "acc_norm_stderr": 0.023157879349083522 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5123152709359606, "acc_stderr": 0.035169204442208966, "acc_norm": 0.5123152709359606, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7828282828282829, "acc_stderr": 0.02937661648494563, "acc_norm": 0.7828282828282829, "acc_norm_stderr": 0.02937661648494563 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.917098445595855, "acc_stderr": 0.01989934131572178, "acc_norm": 0.917098445595855, "acc_norm_stderr": 0.01989934131572178 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6615384615384615, "acc_stderr": 0.023991500500313036, "acc_norm": 0.6615384615384615, "acc_norm_stderr": 0.023991500500313036 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.337037037037037, "acc_stderr": 0.02882088466625326, "acc_norm": 0.337037037037037, "acc_norm_stderr": 0.02882088466625326 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6974789915966386, "acc_stderr": 0.02983796238829194, "acc_norm": 0.6974789915966386, "acc_norm_stderr": 0.02983796238829194 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8440366972477065, "acc_stderr": 0.015555802713590172, "acc_norm": 0.8440366972477065, "acc_norm_stderr": 0.015555802713590172 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5, "acc_stderr": 0.034099716973523674, "acc_norm": 0.5, 
"acc_norm_stderr": 0.034099716973523674 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8480392156862745, "acc_stderr": 0.025195658428931792, "acc_norm": 0.8480392156862745, "acc_norm_stderr": 0.025195658428931792 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.810126582278481, "acc_stderr": 0.02553010046023349, "acc_norm": 0.810126582278481, "acc_norm_stderr": 0.02553010046023349 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.03102441174057221, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.03102441174057221 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7938931297709924, "acc_stderr": 0.03547771004159464, "acc_norm": 0.7938931297709924, "acc_norm_stderr": 0.03547771004159464 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098824, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098824 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7870370370370371, "acc_stderr": 0.0395783547198098, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.0395783547198098 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7791411042944786, "acc_stderr": 0.03259177392742178, "acc_norm": 0.7791411042944786, "acc_norm_stderr": 0.03259177392742178 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.45535714285714285, "acc_stderr": 0.047268355537191, "acc_norm": 0.45535714285714285, "acc_norm_stderr": 0.047268355537191 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406964, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406964 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 
0.04408440022768078 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8263090676883781, "acc_stderr": 0.01354741565866226, "acc_norm": 0.8263090676883781, "acc_norm_stderr": 0.01354741565866226 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7543352601156069, "acc_stderr": 0.023176298203992002, "acc_norm": 0.7543352601156069, "acc_norm_stderr": 0.023176298203992002 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.42905027932960893, "acc_stderr": 0.016553287863116037, "acc_norm": 0.42905027932960893, "acc_norm_stderr": 0.016553287863116037 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7124183006535948, "acc_stderr": 0.02591780611714716, "acc_norm": 0.7124183006535948, "acc_norm_stderr": 0.02591780611714716 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.707395498392283, "acc_stderr": 0.02583989833487798, "acc_norm": 0.707395498392283, "acc_norm_stderr": 0.02583989833487798 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7592592592592593, "acc_stderr": 0.023788583551658533, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.023788583551658533 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, "acc_stderr": 0.02982074719142248, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.02982074719142248 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46936114732724904, "acc_stderr": 0.012746237711716634, "acc_norm": 0.46936114732724904, "acc_norm_stderr": 0.012746237711716634 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6654411764705882, "acc_stderr": 0.028661996202335303, "acc_norm": 0.6654411764705882, "acc_norm_stderr": 0.028661996202335303 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6748366013071896, "acc_stderr": 0.018950886770806315, "acc_norm": 0.6748366013071896, "acc_norm_stderr": 0.018950886770806315 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.04461272175910509, "acc_norm": 0.6818181818181818, 
"acc_norm_stderr": 0.04461272175910509 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7346938775510204, "acc_stderr": 0.0282638899437846, "acc_norm": 0.7346938775510204, "acc_norm_stderr": 0.0282638899437846 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.025870646766169136, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.025870646766169136 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.5662650602409639, "acc_stderr": 0.03858158940685516, "acc_norm": 0.5662650602409639, "acc_norm_stderr": 0.03858158940685516 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.5275397796817626, "mc1_stderr": 0.017476930190712187, "mc2": 0.669482738361527, "mc2_stderr": 0.01527115945822096 }, "harness|winogrande|5": { "acc": 0.8382004735595896, "acc_stderr": 0.010350128010292404 }, "harness|gsm8k|5": { "acc": 0.7050796057619408, "acc_stderr": 0.012560698010954772 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_nbeerbower__bruphin-epsilon
[ "region:us" ]
2024-01-25T05:51:42+00:00
{"pretty_name": "Evaluation run of nbeerbower/bruphin-epsilon", "dataset_summary": "Dataset automatically created during the evaluation run of model [nbeerbower/bruphin-epsilon](https://huggingface.co/nbeerbower/bruphin-epsilon) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_nbeerbower__bruphin-epsilon\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T05:49:20.264803](https://huggingface.co/datasets/open-llm-leaderboard/details_nbeerbower__bruphin-epsilon/blob/main/results_2024-01-25T05-49-20.264803.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6562434752388866,\n \"acc_stderr\": 0.03198900028362337,\n \"acc_norm\": 0.6555271311464355,\n \"acc_norm_stderr\": 0.0326584820786784,\n \"mc1\": 0.5275397796817626,\n \"mc1_stderr\": 0.017476930190712187,\n \"mc2\": 0.669482738361527,\n \"mc2_stderr\": 0.01527115945822096\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6996587030716723,\n \"acc_stderr\": 0.013395909309957004,\n \"acc_norm\": 0.7209897610921502,\n \"acc_norm_stderr\": 0.013106784883601327\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7137024497112129,\n \"acc_stderr\": 0.0045110633512787015,\n \"acc_norm\": 0.8809002190798646,\n \"acc_norm_stderr\": 0.003232439139881554\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6592592592592592,\n \"acc_stderr\": 0.040943762699967926,\n \"acc_norm\": 0.6592592592592592,\n \"acc_norm_stderr\": 0.040943762699967926\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7132075471698113,\n \"acc_stderr\": 0.027834912527544064,\n \"acc_norm\": 0.7132075471698113,\n \"acc_norm_stderr\": 0.027834912527544064\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n 
\"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6647398843930635,\n \"acc_stderr\": 0.03599586301247077,\n \"acc_norm\": 0.6647398843930635,\n \"acc_norm_stderr\": 0.03599586301247077\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.04928099597287534,\n \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.04928099597287534\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5787234042553191,\n \"acc_stderr\": 0.03227834510146267,\n \"acc_norm\": 0.5787234042553191,\n \"acc_norm_stderr\": 0.03227834510146267\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.5087719298245614,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878152,\n \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878152\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.02548718714785938,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.02548718714785938\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 
0.04463112720677172,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7903225806451613,\n \"acc_stderr\": 0.023157879349083522,\n \"acc_norm\": 0.7903225806451613,\n \"acc_norm_stderr\": 0.023157879349083522\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.02937661648494563,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.02937661648494563\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.917098445595855,\n \"acc_stderr\": 0.01989934131572178,\n \"acc_norm\": 0.917098445595855,\n \"acc_norm_stderr\": 0.01989934131572178\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6615384615384615,\n \"acc_stderr\": 0.023991500500313036,\n \"acc_norm\": 0.6615384615384615,\n \"acc_norm_stderr\": 0.023991500500313036\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.337037037037037,\n \"acc_stderr\": 0.02882088466625326,\n \"acc_norm\": 0.337037037037037,\n \"acc_norm_stderr\": 0.02882088466625326\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6974789915966386,\n \"acc_stderr\": 0.02983796238829194,\n \"acc_norm\": 0.6974789915966386,\n \"acc_norm_stderr\": 0.02983796238829194\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8440366972477065,\n \"acc_stderr\": 0.015555802713590172,\n \"acc_norm\": 0.8440366972477065,\n \"acc_norm_stderr\": 0.015555802713590172\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.034099716973523674,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.034099716973523674\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8480392156862745,\n \"acc_stderr\": 0.025195658428931792,\n \"acc_norm\": 0.8480392156862745,\n \"acc_norm_stderr\": 0.025195658428931792\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.810126582278481,\n \"acc_stderr\": 0.02553010046023349,\n \"acc_norm\": 0.810126582278481,\n \"acc_norm_stderr\": 0.02553010046023349\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.03102441174057221,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.03102441174057221\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7938931297709924,\n \"acc_stderr\": 0.03547771004159464,\n \"acc_norm\": 0.7938931297709924,\n \"acc_norm_stderr\": 0.03547771004159464\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.7870370370370371,\n 
\"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742178,\n \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742178\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n \"acc_stderr\": 0.047268355537191,\n \"acc_norm\": 0.45535714285714285,\n \"acc_norm_stderr\": 0.047268355537191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406964,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406964\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8263090676883781,\n \"acc_stderr\": 0.01354741565866226,\n \"acc_norm\": 0.8263090676883781,\n \"acc_norm_stderr\": 0.01354741565866226\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7543352601156069,\n \"acc_stderr\": 0.023176298203992002,\n \"acc_norm\": 0.7543352601156069,\n \"acc_norm_stderr\": 0.023176298203992002\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.42905027932960893,\n \"acc_stderr\": 0.016553287863116037,\n \"acc_norm\": 0.42905027932960893,\n \"acc_norm_stderr\": 0.016553287863116037\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7124183006535948,\n \"acc_stderr\": 0.02591780611714716,\n \"acc_norm\": 0.7124183006535948,\n \"acc_norm_stderr\": 0.02591780611714716\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.707395498392283,\n \"acc_stderr\": 0.02583989833487798,\n \"acc_norm\": 0.707395498392283,\n 
\"acc_norm_stderr\": 0.02583989833487798\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.023788583551658533,\n \"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.023788583551658533\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.02982074719142248,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.02982074719142248\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46936114732724904,\n \"acc_stderr\": 0.012746237711716634,\n \"acc_norm\": 0.46936114732724904,\n \"acc_norm_stderr\": 0.012746237711716634\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6654411764705882,\n \"acc_stderr\": 0.028661996202335303,\n \"acc_norm\": 0.6654411764705882,\n \"acc_norm_stderr\": 0.028661996202335303\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6748366013071896,\n \"acc_stderr\": 0.018950886770806315,\n \"acc_norm\": 0.6748366013071896,\n \"acc_norm_stderr\": 0.018950886770806315\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.04461272175910509,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.04461272175910509\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7346938775510204,\n \"acc_stderr\": 0.0282638899437846,\n \"acc_norm\": 0.7346938775510204,\n \"acc_norm_stderr\": 0.0282638899437846\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.025870646766169136,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.025870646766169136\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5662650602409639,\n \"acc_stderr\": 0.03858158940685516,\n \"acc_norm\": 
0.5662650602409639,\n \"acc_norm_stderr\": 0.03858158940685516\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5275397796817626,\n \"mc1_stderr\": 0.017476930190712187,\n \"mc2\": 0.669482738361527,\n \"mc2_stderr\": 0.01527115945822096\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8382004735595896,\n \"acc_stderr\": 0.010350128010292404\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7050796057619408,\n \"acc_stderr\": 0.012560698010954772\n }\n}\n```", "repo_url": "https://huggingface.co/nbeerbower/bruphin-epsilon", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|arc:challenge|25_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|gsm8k|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hellaswag|10_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-49-20.264803.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-49-20.264803.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-49-20.264803.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-49-20.264803.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-49-20.264803.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-49-20.264803.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T05-49-20.264803.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T05-49-20.264803.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["**/details_harness|winogrande|5_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T05-49-20.264803.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T05_49_20.264803", "path": ["results_2024-01-25T05-49-20.264803.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T05-49-20.264803.parquet"]}]}]}
2024-01-25T05:52:10+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of nbeerbower/bruphin-epsilon Dataset automatically created during the evaluation run of model nbeerbower/bruphin-epsilon on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T05:49:20.264803(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of nbeerbower/bruphin-epsilon\n\n\n\nDataset automatically created during the evaluation run of model nbeerbower/bruphin-epsilon on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T05:49:20.264803(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of nbeerbower/bruphin-epsilon\n\n\n\nDataset automatically created during the evaluation run of model nbeerbower/bruphin-epsilon on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T05:49:20.264803(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 187, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of nbeerbower/bruphin-epsilon\n\n\n\nDataset automatically created during the evaluation run of model nbeerbower/bruphin-epsilon on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T05:49:20.264803(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.04791039600968361, 0.2163393795490265, -0.005642995238304138, 0.03623748943209648, 0.08038690686225891, -0.014129631221294403, 0.035486411303281784, 0.09278768301010132, 0.018111389130353928, 0.17933666706085205, -0.023204507306218147, 0.09908205270767212, 0.06706437468528748, 0.13020502030849457, 0.03037492372095585, -0.1333393007516861, 0.021570129320025444, -0.08741392195224762, 0.07280996441841125, 0.08802838623523712, 0.06421631574630737, -0.08383730053901672, 0.06271342933177948, -0.020788734778761864, 0.020363453775644302, -0.011787351220846176, -0.08122720569372177, -0.03590806573629379, 0.0919463038444519, 0.11468099802732468, 0.034658800810575485, -0.020042166113853455, 0.01415025070309639, -0.2814118266105652, 0.015294115990400314, 0.0934738963842392, -0.009079410694539547, 0.039430513978004456, 0.1355157047510147, -0.07431823760271072, 0.10083386301994324, -0.017468683421611786, 0.07280780375003815, 0.06500497460365295, -0.11290149390697479, -0.13897006213665009, -0.13321605324745178, 0.009415088221430779, 0.05274011194705963, 0.04779404401779175, -0.029679961502552032, 0.13127829134464264, -0.06147618219256401, 0.046858400106430054, 0.1142621636390686, -0.0964045599102974, -0.023526642471551895, 0.038739241659641266, 0.00377110973931849, 0.06302979588508606, -0.09621758759021759, -0.02464895322918892, 0.031184131279587746, 0.05255693942308426, 0.010309415869414806, 0.017277784645557404, -0.02950480580329895, 0.012635476887226105, -0.13511794805526733, -0.12615808844566345, 0.14007484912872314, 0.011538442224264145, -0.0461355596780777, -0.1804630160331726, -0.006862678099423647, 0.011893097311258316, 0.0018152426928281784, -0.007791922427713871, -0.005722573958337307, -0.021769896149635315, 0.0869968980550766, -0.013272443786263466, -0.10022713989019394, -0.02046598121523857, -0.010050266981124878, 0.08702145516872406, 0.01629200205206871, -0.0074973031878471375, 0.014924979768693447, 0.11309453845024109, 0.0034724953584372997, 
-0.06979776173830032, -0.06994464993476868, -0.05892787128686905, -0.131834015250206, -0.04316321760416031, 0.015025362372398376, -0.06405520439147949, 0.038812316954135895, 0.2382860630750656, -0.015622804872691631, 0.02941727265715599, -0.10814893245697021, 0.0073296488262712955, 0.12128151953220367, 0.05610654875636101, -0.06663060188293457, -0.05049123615026474, -0.03472890704870224, 0.023595403879880905, 0.02535371668636799, -0.01609077677130699, 0.013838116079568863, 0.06085674464702606, 0.023649808019399643, 0.10998441278934479, 0.11819714307785034, 0.03612431511282921, -0.07493555545806885, -0.023523740470409393, 0.23649170994758606, -0.13862168788909912, -0.01217632181942463, 0.025667445734143257, -0.04239308089017868, -0.12403973937034607, 0.07121637463569641, -0.0076763806864619255, -0.047975242137908936, 0.13386063277721405, -0.04657518118619919, -0.07989567518234253, -0.0726543664932251, -0.05461706966161728, 0.05506948381662369, 0.03492327034473419, -0.04522252455353737, -0.06989045441150665, -0.07859586179256439, -0.07869493216276169, 0.03574007749557495, -0.06318933516740799, -0.029274825006723404, 0.015393655747175217, -0.004405188374221325, -0.010214338079094887, -0.0050008343532681465, 0.10891777276992798, -0.053985580801963806, 0.034910596907138824, 0.0023965120781213045, 0.019760221242904663, 0.09892038255929947, 0.043322429060935974, -0.11727530509233475, 0.06907296180725098, -0.13037796318531036, 0.10126728564500809, -0.11878418922424316, -0.014926942065358162, -0.12274029850959778, -0.010525908321142197, -0.03277416154742241, 0.03487933427095413, -0.03266449272632599, 0.07629868388175964, -0.2046399712562561, -0.002592249307781458, 0.15318521857261658, -0.11184711754322052, -0.08483019471168518, 0.08807948976755142, -0.04898128658533096, 0.05557879060506821, 0.03464620187878609, 0.0965619832277298, 0.11007246375083923, -0.0711352750658989, -0.08871032297611237, -0.06516294181346893, -0.029477518051862717, 0.1578751504421234, 
0.05980880931019783, -0.0788218304514885, 0.0954413115978241, 0.052668627351522446, -0.008572202175855637, -0.06466993689537048, -0.0002819304645527154, -0.06637656688690186, -0.008339992724359035, -0.07221560925245285, -0.04248460382223129, -0.007529603783041239, -0.07876928150653839, -0.011405633762478828, -0.09053154289722443, -0.014789667911827564, 0.09074748307466507, -0.021620888262987137, 0.007294457871466875, -0.06497783958911896, 0.022860558703541756, 0.01006220281124115, 0.011940333992242813, -0.21858979761600494, -0.1004788801074028, 0.030078299343585968, -0.18229351937770844, 0.06016714870929718, 0.026341907680034637, 0.008898507803678513, 0.048804912716150284, -0.003607476130127907, 0.029508590698242188, 0.017599020153284073, -0.012854717671871185, 0.0015837829560041428, -0.14087092876434326, -0.054358333349227905, -0.0868547186255455, 0.08722024410963058, -0.15067943930625916, -0.01665574684739113, 0.060691967606544495, 0.15388309955596924, 0.02315092459321022, -0.0851108655333519, 0.05766615271568298, 0.009695546701550484, -0.04523029923439026, -0.04703595116734505, -0.0034524044021964073, -0.026930920779705048, 0.03553231060504913, 0.021195845678448677, -0.1932411789894104, -0.11634428799152374, 0.06417258828878403, 0.11254087835550308, -0.07246559858322144, -0.10425271838903427, -0.07026132196187973, -0.06588484346866608, -0.08484671264886856, -0.05869324505329132, 0.0860675573348999, 0.09492869675159454, 0.04617730528116226, -0.06296630203723907, -0.05641407519578934, 0.010484397411346436, 0.06115131825208664, -0.06354109942913055, 0.10567289590835571, 0.07506196945905685, -0.061659716069698334, 0.09917713701725006, -0.04410143196582794, 0.10397359728813171, 0.07021968811750412, 0.032240137457847595, -0.09717924147844315, 0.005734722595661879, 0.06048472225666046, 0.04573119431734085, 0.06486092507839203, -0.04966524615883827, 0.04277870059013367, 0.08175589144229889, -0.017519511282444, 0.03758130595088005, -0.057480208575725555, 
0.035156864672899246, 0.04059283807873726, 0.007614253554493189, 0.025100793689489365, 0.013125138357281685, 0.012750625610351562, 0.0740957260131836, 0.01882133074104786, 0.1018911600112915, -0.018283911049365997, -0.04911963641643524, -0.09781987965106964, 0.1434195339679718, -0.08768437802791595, -0.28421980142593384, -0.17096948623657227, -0.052484650164842606, -0.04261421784758568, -0.016687825322151184, 0.059820447117090225, -0.004991066176444292, -0.10231000185012817, -0.10457269847393036, 0.047682829201221466, 0.030222542583942413, -0.13335834443569183, -0.055918700993061066, 0.05903123691678047, 0.004046290647238493, -0.16044564545154572, 0.043087176978588104, 0.0489916056394577, -0.052583999931812286, -0.003465123940259218, 0.08837461471557617, 0.11343669891357422, 0.0880734920501709, 0.06780747324228287, -0.02504754066467285, -0.005258746445178986, 0.17930355668067932, -0.10601393133401871, 0.034105245023965836, 0.09759902954101562, -0.05570385977625847, 0.06825846433639526, 0.1665237694978714, 0.020349988713860512, -0.0791231095790863, 0.05874956399202347, 0.09438906610012054, -0.06376618146896362, -0.24169352650642395, -0.1234794557094574, -0.033955007791519165, 0.0008234062697738409, 0.11643575131893158, 0.0685604140162468, 0.032744936645030975, 0.011701160110533237, -0.11142599582672119, -0.024124953895807266, -0.04690278321504593, 0.07018698751926422, 0.07905308902263641, 0.0006425636820495129, 0.04026597738265991, -0.03658858314156532, 0.018651319667696953, 0.11190672218799591, 0.04214319959282875, 0.15232615172863007, -0.04638439416885376, 0.18173986673355103, 0.09152188897132874, 0.07189832627773285, -0.0313379131257534, 0.03869717940688133, -0.009287547320127487, 0.06164972484111786, -0.009804533794522285, -0.10701268166303635, -0.05558016151189804, 0.10549101233482361, 0.029498614370822906, -0.06438135355710983, 0.035649269819259644, -0.07207804173231125, 0.03572579473257065, 0.17599202692508698, -0.03326812759041786, -0.12784647941589355, 
-0.0594584196805954, 0.04639163613319397, -0.027199558913707733, -0.09480348974466324, -0.004193510860204697, 0.07930733263492584, -0.14010605216026306, 0.013693536631762981, -0.041911520063877106, 0.08063740283250809, -0.12892785668373108, -0.024687152355909348, -0.010574528016149998, 0.055295541882514954, -0.001930187689140439, 0.12393029034137726, -0.13382017612457275, 0.08656985312700272, -0.006457367911934853, 0.019234605133533478, -0.10282473266124725, 0.05438121780753136, -0.03830903023481369, -0.05861615389585495, 0.13138267397880554, -0.009282446466386318, -0.09803597629070282, -0.047658614814281464, -0.10566206276416779, -0.010570862330496311, 0.05513928830623627, -0.09072268009185791, 0.09713923931121826, 0.0295790396630764, -0.027169063687324524, -0.03124859929084778, -0.02311021089553833, -0.12394212186336517, -0.2529137134552002, 0.10412178933620453, -0.1081814169883728, 0.05125977098941803, -0.063370481133461, -0.045010004192590714, -0.02985570579767227, 0.16427172720432281, -0.08787844330072403, -0.054216429591178894, -0.11300380527973175, 0.0016083475202322006, 0.18943361937999725, -0.04905908554792404, 0.06622710078954697, -0.044222235679626465, 0.18924129009246826, -0.02043743059039116, -0.04098759964108467, -0.008891862817108631, -0.09301438182592392, -0.17775771021842957, -0.05265939608216286, 0.11018973588943481, 0.08530452102422714, 0.015070577152073383, -0.0059378184378147125, 0.01372810173779726, 0.014316175132989883, -0.09843333065509796, 0.025481898337602615, 0.11665110290050507, 0.11126638948917389, 0.044310688972473145, -0.020291320979595184, -0.12912218272686005, -0.10429271310567856, -0.0920490175485611, 0.05339979752898216, 0.16243037581443787, -0.06791625916957855, 0.17624083161354065, 0.15195593237876892, -0.08972783386707306, -0.17666849493980408, -0.06317726522684097, 0.033571429550647736, -0.025055114179849625, 0.12443924695253372, -0.20570597052574158, 0.07674187421798706, 0.0655505433678627, -0.02884523943066597, 
0.1273714154958725, -0.2424880862236023, -0.1330125480890274, 0.0398808978497982, 0.0376511812210083, -0.24375298619270325, -0.16309034824371338, -0.10962279140949249, -0.025943011045455933, -0.16836312413215637, 0.1309489756822586, 0.04454779624938965, 0.01948658935725689, -0.025104671716690063, 0.08744078129529953, 0.05841953307390213, -0.06929680705070496, 0.12380536645650864, -0.008299141190946102, 0.015416141599416733, -0.09635112434625626, -0.03866664320230484, 0.00588741572573781, -0.03912985697388649, 0.0699571818113327, 0.016969263553619385, 0.05338487774133682, -0.0812288299202919, -0.038225673139095306, -0.06707939505577087, 0.05237045884132385, -0.07467429339885712, -0.05358777940273285, -0.06401006877422333, 0.08727090060710907, 0.08703526854515076, -0.005510182585567236, 0.007338722236454487, -0.04944533109664917, 0.043752752244472504, 0.23060548305511475, 0.10433153063058853, 0.05843289941549301, -0.11356410384178162, -0.036414213478565216, -0.014200255274772644, -0.006993493996560574, -0.11381030082702637, 0.0456143356859684, 0.0908355563879013, 0.04246412217617035, 0.08456334471702576, -0.02282777428627014, -0.18183180689811707, -0.0060369959101080894, 0.08499517291784286, -0.09487160295248032, -0.18213970959186554, 0.02860688790678978, 0.15897199511528015, -0.1589336097240448, -0.07678083330392838, 0.0705035924911499, 0.016254941001534462, -0.03440482169389725, 0.0023368061520159245, 0.07274326682090759, 0.060180746018886566, 0.09482525289058685, 0.011299680918455124, 0.05326370894908905, -0.06886263191699982, 0.08435340225696564, 0.1404651254415512, -0.11532764136791229, 0.004902771674096584, 0.031654566526412964, -0.05876132473349571, -0.06730812042951584, -0.006381674204021692, -0.008335348218679428, 0.017604468390345573, -0.035882674157619476, 0.04119764268398285, -0.020150629803538322, 0.059991925954818726, 0.11702431738376617, -0.0017802799120545387, 0.0380597747862339, 0.010825886391103268, -0.003763183020055294, -0.06520237028598785, 
0.1076737567782402, 0.027256803587079048, 0.04718302935361862, -0.03864094614982605, 0.018885428085923195, 0.013572152704000473, -0.02674528956413269, 0.020758308470249176, -0.05626050382852554, -0.058348968625068665, -0.0008116420358419418, -0.1620626449584961, 0.05676036328077316, -0.0842498242855072, 0.004979775752872229, 0.0027434974908828735, -0.01830894500017166, -0.00043337562237866223, 0.007381410803645849, -0.0747096985578537, -0.04971451312303543, -0.04222020506858826, 0.12805967032909393, -0.19454818964004517, -0.0013703665463253856, 0.09297063946723938, -0.07247994840145111, 0.07075537741184235, -0.005416782107204199, -0.02175038680434227, 0.018760839477181435, -0.10073140263557434, -0.002725037280470133, -0.028090860694646835, 0.060345444828271866, 0.01037801057100296, -0.13757947087287903, -0.011347364634275436, -0.0007527428679168224, -0.08379364013671875, -0.01746520772576332, 0.030077600851655006, -0.16241957247257233, 0.048337750136852264, 0.08073639869689941, -0.046063974499702454, -0.049379490315914154, 0.04029827564954758, 0.04572407528758049, -0.0061536915600299835, 0.10201568901538849, -0.006069044582545757, 0.035044606775045395, -0.14284193515777588, -0.0428125336766243, 0.001854220638051629, 0.013637714087963104, 0.03551722317934036, 0.021574951708316803, 0.022048084065318108, 0.005866548046469688, 0.23074151575565338, -0.014993250370025635, 0.03816338628530502, 0.01809598319232464, -0.02500775083899498, -0.03594851493835449, 0.030973609536886215, 0.033037662506103516, 0.00256212311796844, 0.018567167222499847, 0.026787225157022476, -0.0372064933180809, -0.06604291498661041, -0.02694103680551052, 0.06328608095645905, 0.13897058367729187, 0.14297574758529663, -0.04415442794561386, 0.06940755248069763, -0.1545320451259613, -0.04885247349739075, 0.02496727928519249, -0.05114879086613655, 0.049028173089027405, -0.07683561742305756, 0.06512917578220367, 0.07630009949207306, -0.0981646180152893, 0.14885061979293823, -0.051124755293130875, 
-0.02946205995976925, -0.025199290364980698, -0.17108610272407532, -0.03462441265583038, 0.023542651906609535, 0.007559951860457659, -0.08974888920783997, 0.11185625195503235, 0.1174313873052597, -0.00968980137258768, -0.006787797436118126, 0.08638595044612885, -0.05306810885667801, -0.0561041533946991, -0.020267214626073837, 0.003916308283805847, 0.013445248827338219, 0.010032433085143566, 0.08348944783210754, 0.01340104453265667, 0.06092347949743271, 0.07290150970220566, 0.10112644731998444, 0.033531177788972855, 0.015546713024377823, -0.03733500465750694, -0.056313179433345795, 0.0016852286644279957, -0.024423904716968536, -0.06087090075016022, 0.19945821166038513, 0.04890409857034683, 0.015113523229956627, 0.007021550089120865, 0.21518689393997192, -0.00834683421999216, -0.06625228375196457, -0.12661170959472656, 0.13145321607589722, -0.00038868188858032227, 0.021716538816690445, 0.022389424964785576, -0.11848198622465134, 0.028995906934142113, 0.15777665376663208, 0.10748734325170517, 0.047914281487464905, 0.011797605082392693, 0.04789819195866585, 0.0228513702750206, -0.025936085730791092, 0.05251157283782959, 0.02367476187646389, 0.23241671919822693, -0.05519208312034607, 0.06801315397024155, -0.011896638199687004, -0.006280135363340378, -0.01889587938785553, 0.09377539902925491, -0.03675936907529831, 0.012828594073653221, -0.06993521749973297, 0.08864496648311615, -0.06797095388174057, -0.2595530152320862, -0.010706810280680656, -0.06341373920440674, -0.13501277565956116, -0.01917249709367752, 0.02311314456164837, -0.014599360525608063, 0.038603466004133224, 0.0314411036670208, -0.035641804337501526, 0.19768184423446655, 0.000752369174733758, -0.07965109497308731, -0.051535069942474365, 0.06342054158449173, -0.03170117735862732, 0.2777617275714874, -0.001982523361220956, 0.07169702649116516, 0.09153822064399719, -0.01617893949151039, -0.1344229280948639, 0.016344917938113213, 0.08928713202476501, -0.05069536715745926, 0.062362417578697205, 
0.16951638460159302, -0.015052084811031818, 0.13504505157470703, 0.03643658384680748, -0.026268014684319496, 0.07322539389133453, 0.057749971747398376, 0.04118327051401138, -0.09335638582706451, 0.0738900750875473, -0.07750950753688812, 0.13364458084106445, 0.10933135449886322, -0.015843916684389114, -0.012812932953238487, -0.05499352142214775, 0.059728723019361496, -0.02981627732515335, 0.15711626410484314, -0.01990419812500477, -0.15002107620239258, 0.04150255769491196, 0.019345078617334366, 0.05708890035748482, -0.24606937170028687, -0.056123264133930206, 0.10751655697822571, -0.049969352781772614, 0.02316959761083126, 0.09419625997543335, 0.0456995964050293, 0.026799578219652176, -0.06147283315658569, -0.08952809870243073, 0.007755796425044537, 0.11629591882228851, -0.08635883033275604, -0.03218517825007439 ]
56913c52a2474d1c225d7e74ae583c9153f2567d
# Dataset Card for Evaluation run of kwchoi/DPO_mistral_7b_ultra_0124_v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [kwchoi/DPO_mistral_7b_ultra_0124_v1](https://huggingface.co/kwchoi/DPO_mistral_7b_ultra_0124_v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_kwchoi__DPO_mistral_7b_ultra_0124_v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T05:49:50.348304](https://huggingface.co/datasets/open-llm-leaderboard/details_kwchoi__DPO_mistral_7b_ultra_0124_v1/blob/main/results_2024-01-25T05-49-50.348304.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5976042024797005, "acc_stderr": 0.03345462257965717, "acc_norm": 0.6034041935061322, "acc_norm_stderr": 0.03416858616200466, "mc1": 0.5507955936352509, "mc1_stderr": 0.017412941986115295, "mc2": 0.694525955019443, "mc2_stderr": 0.015330113605051526 }, "harness|arc:challenge|25": { "acc": 0.6305460750853242, "acc_stderr": 0.014104578366491888, "acc_norm": 0.6612627986348123, "acc_norm_stderr": 0.01383056892797433 }, "harness|hellaswag|10": { "acc": 0.6980681139215296, "acc_stderr": 0.004581576124179742, "acc_norm": 0.8638717386974706, "acc_norm_stderr": 0.0034222387022263714 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5555555555555556, "acc_stderr": 0.04292596718256981, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.04292596718256981 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.618421052631579, "acc_stderr": 0.03953173377749194, "acc_norm": 0.618421052631579, "acc_norm_stderr": 0.03953173377749194 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6641509433962264, "acc_stderr": 0.029067220146644823, "acc_norm": 0.6641509433962264, "acc_norm_stderr": 0.029067220146644823 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6527777777777778, "acc_stderr": 0.039812405437178615, "acc_norm": 0.6527777777777778, "acc_norm_stderr": 0.039812405437178615 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, 
"acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5838150289017341, "acc_stderr": 0.03758517775404947, "acc_norm": 0.5838150289017341, "acc_norm_stderr": 0.03758517775404947 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.49019607843137253, "acc_stderr": 0.04974229460422817, "acc_norm": 0.49019607843137253, "acc_norm_stderr": 0.04974229460422817 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5234042553191489, "acc_stderr": 0.032650194750335815, "acc_norm": 0.5234042553191489, "acc_norm_stderr": 0.032650194750335815 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.42105263157894735, "acc_stderr": 0.046446020912223177, "acc_norm": 0.42105263157894735, "acc_norm_stderr": 0.046446020912223177 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5241379310344828, "acc_stderr": 0.0416180850350153, "acc_norm": 0.5241379310344828, "acc_norm_stderr": 0.0416180850350153 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4021164021164021, "acc_stderr": 0.025253032554997692, "acc_norm": 0.4021164021164021, "acc_norm_stderr": 0.025253032554997692 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.38095238095238093, "acc_stderr": 0.04343525428949098, "acc_norm": 0.38095238095238093, "acc_norm_stderr": 0.04343525428949098 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6903225806451613, "acc_stderr": 0.026302774983517414, "acc_norm": 0.6903225806451613, "acc_norm_stderr": 0.026302774983517414 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4630541871921182, "acc_stderr": 0.035083705204426656, "acc_norm": 0.4630541871921182, "acc_norm_stderr": 0.035083705204426656 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.61, "acc_stderr": 0.04902071300001974, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7393939393939394, "acc_stderr": 0.034277431758165236, "acc_norm": 0.7393939393939394, "acc_norm_stderr": 0.034277431758165236 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7373737373737373, "acc_stderr": 0.03135305009533084, "acc_norm": 0.7373737373737373, "acc_norm_stderr": 0.03135305009533084 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8290155440414507, "acc_stderr": 0.02717121368316453, "acc_norm": 0.8290155440414507, "acc_norm_stderr": 0.02717121368316453 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5820512820512821, "acc_stderr": 0.025007329882461217, "acc_norm": 0.5820512820512821, "acc_norm_stderr": 0.025007329882461217 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.31851851851851853, "acc_stderr": 0.02840653309060846, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.02840653309060846 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6050420168067226, "acc_stderr": 0.03175367846096626, "acc_norm": 0.6050420168067226, "acc_norm_stderr": 0.03175367846096626 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7798165137614679, "acc_stderr": 0.01776597865232753, "acc_norm": 0.7798165137614679, "acc_norm_stderr": 0.01776597865232753 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4583333333333333, "acc_stderr": 
0.03398110890294636, "acc_norm": 0.4583333333333333, "acc_norm_stderr": 0.03398110890294636 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7696078431372549, "acc_stderr": 0.029554292605695066, "acc_norm": 0.7696078431372549, "acc_norm_stderr": 0.029554292605695066 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7468354430379747, "acc_stderr": 0.028304657943035307, "acc_norm": 0.7468354430379747, "acc_norm_stderr": 0.028304657943035307 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6098654708520179, "acc_stderr": 0.03273766725459156, "acc_norm": 0.6098654708520179, "acc_norm_stderr": 0.03273766725459156 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7022900763358778, "acc_stderr": 0.04010358942462203, "acc_norm": 0.7022900763358778, "acc_norm_stderr": 0.04010358942462203 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.03749492448709695, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.03749492448709695 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7222222222222222, "acc_stderr": 0.043300437496507416, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.043300437496507416 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7423312883435583, "acc_stderr": 0.03436150827846917, "acc_norm": 0.7423312883435583, "acc_norm_stderr": 0.03436150827846917 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.48214285714285715, "acc_stderr": 0.047427623612430116, "acc_norm": 0.48214285714285715, "acc_norm_stderr": 0.047427623612430116 }, "harness|hendrycksTest-management|5": { "acc": 0.7281553398058253, "acc_stderr": 0.044052680241409216, "acc_norm": 0.7281553398058253, "acc_norm_stderr": 0.044052680241409216 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8547008547008547, "acc_stderr": 0.02308663508684141, "acc_norm": 0.8547008547008547, "acc_norm_stderr": 0.02308663508684141 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.65, "acc_stderr": 
0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7484035759897829, "acc_stderr": 0.015517322365529636, "acc_norm": 0.7484035759897829, "acc_norm_stderr": 0.015517322365529636 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6705202312138728, "acc_stderr": 0.025305258131879702, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.025305258131879702 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.36089385474860336, "acc_stderr": 0.016062290671110473, "acc_norm": 0.36089385474860336, "acc_norm_stderr": 0.016062290671110473 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6535947712418301, "acc_stderr": 0.02724561304721536, "acc_norm": 0.6535947712418301, "acc_norm_stderr": 0.02724561304721536 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6720257234726688, "acc_stderr": 0.02666441088693761, "acc_norm": 0.6720257234726688, "acc_norm_stderr": 0.02666441088693761 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6574074074074074, "acc_stderr": 0.02640614597362568, "acc_norm": 0.6574074074074074, "acc_norm_stderr": 0.02640614597362568 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.41843971631205673, "acc_stderr": 0.02942799403941999, "acc_norm": 0.41843971631205673, "acc_norm_stderr": 0.02942799403941999 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.41590612777053454, "acc_stderr": 0.012588323850313629, "acc_norm": 0.41590612777053454, "acc_norm_stderr": 0.012588323850313629 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5882352941176471, "acc_stderr": 0.029896163033125474, "acc_norm": 0.5882352941176471, "acc_norm_stderr": 0.029896163033125474 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6013071895424836, "acc_stderr": 0.019808281317449848, "acc_norm": 0.6013071895424836, "acc_norm_stderr": 0.019808281317449848 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 
0.04494290866252091, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.04494290866252091 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6653061224489796, "acc_stderr": 0.030209235226242307, "acc_norm": 0.6653061224489796, "acc_norm_stderr": 0.030209235226242307 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7960199004975125, "acc_stderr": 0.02849317624532607, "acc_norm": 0.7960199004975125, "acc_norm_stderr": 0.02849317624532607 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.82, "acc_stderr": 0.038612291966536934, "acc_norm": 0.82, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-virology|5": { "acc": 0.5, "acc_stderr": 0.03892494720807614, "acc_norm": 0.5, "acc_norm_stderr": 0.03892494720807614 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7777777777777778, "acc_stderr": 0.031885780176863984, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.031885780176863984 }, "harness|truthfulqa:mc|0": { "mc1": 0.5507955936352509, "mc1_stderr": 0.017412941986115295, "mc2": 0.694525955019443, "mc2_stderr": 0.015330113605051526 }, "harness|winogrande|5": { "acc": 0.7947908445146015, "acc_stderr": 0.011350315707462059 }, "harness|gsm8k|5": { "acc": 0.25473843821076575, "acc_stderr": 0.012001731232879127 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_kwchoi__DPO_mistral_7b_ultra_0124_v1
[ "region:us" ]
2024-01-25T05:52:11+00:00
{"pretty_name": "Evaluation run of kwchoi/DPO_mistral_7b_ultra_0124_v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [kwchoi/DPO_mistral_7b_ultra_0124_v1](https://huggingface.co/kwchoi/DPO_mistral_7b_ultra_0124_v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_kwchoi__DPO_mistral_7b_ultra_0124_v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T05:49:50.348304](https://huggingface.co/datasets/open-llm-leaderboard/details_kwchoi__DPO_mistral_7b_ultra_0124_v1/blob/main/results_2024-01-25T05-49-50.348304.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5976042024797005,\n \"acc_stderr\": 0.03345462257965717,\n \"acc_norm\": 0.6034041935061322,\n \"acc_norm_stderr\": 0.03416858616200466,\n \"mc1\": 0.5507955936352509,\n \"mc1_stderr\": 0.017412941986115295,\n \"mc2\": 0.694525955019443,\n \"mc2_stderr\": 0.015330113605051526\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6305460750853242,\n \"acc_stderr\": 0.014104578366491888,\n \"acc_norm\": 0.6612627986348123,\n \"acc_norm_stderr\": 0.01383056892797433\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6980681139215296,\n \"acc_stderr\": 0.004581576124179742,\n \"acc_norm\": 0.8638717386974706,\n \"acc_norm_stderr\": 0.0034222387022263714\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.04292596718256981,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.04292596718256981\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.618421052631579,\n \"acc_stderr\": 0.03953173377749194,\n \"acc_norm\": 0.618421052631579,\n \"acc_norm_stderr\": 0.03953173377749194\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6641509433962264,\n \"acc_stderr\": 0.029067220146644823,\n \"acc_norm\": 0.6641509433962264,\n \"acc_norm_stderr\": 0.029067220146644823\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6527777777777778,\n \"acc_stderr\": 0.039812405437178615,\n \"acc_norm\": 0.6527777777777778,\n \"acc_norm_stderr\": 0.039812405437178615\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n 
\"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5838150289017341,\n \"acc_stderr\": 0.03758517775404947,\n \"acc_norm\": 0.5838150289017341,\n \"acc_norm_stderr\": 0.03758517775404947\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.49019607843137253,\n \"acc_stderr\": 0.04974229460422817,\n \"acc_norm\": 0.49019607843137253,\n \"acc_norm_stderr\": 0.04974229460422817\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5234042553191489,\n \"acc_stderr\": 0.032650194750335815,\n \"acc_norm\": 0.5234042553191489,\n \"acc_norm_stderr\": 0.032650194750335815\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.42105263157894735,\n \"acc_stderr\": 0.046446020912223177,\n \"acc_norm\": 0.42105263157894735,\n \"acc_norm_stderr\": 0.046446020912223177\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5241379310344828,\n \"acc_stderr\": 0.0416180850350153,\n \"acc_norm\": 0.5241379310344828,\n \"acc_norm_stderr\": 0.0416180850350153\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4021164021164021,\n \"acc_stderr\": 0.025253032554997692,\n \"acc_norm\": 0.4021164021164021,\n \"acc_norm_stderr\": 0.025253032554997692\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.38095238095238093,\n \"acc_stderr\": 
0.04343525428949098,\n \"acc_norm\": 0.38095238095238093,\n \"acc_norm_stderr\": 0.04343525428949098\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6903225806451613,\n \"acc_stderr\": 0.026302774983517414,\n \"acc_norm\": 0.6903225806451613,\n \"acc_norm_stderr\": 0.026302774983517414\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4630541871921182,\n \"acc_stderr\": 0.035083705204426656,\n \"acc_norm\": 0.4630541871921182,\n \"acc_norm_stderr\": 0.035083705204426656\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7393939393939394,\n \"acc_stderr\": 0.034277431758165236,\n \"acc_norm\": 0.7393939393939394,\n \"acc_norm_stderr\": 0.034277431758165236\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7373737373737373,\n \"acc_stderr\": 0.03135305009533084,\n \"acc_norm\": 0.7373737373737373,\n \"acc_norm_stderr\": 0.03135305009533084\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8290155440414507,\n \"acc_stderr\": 0.02717121368316453,\n \"acc_norm\": 0.8290155440414507,\n \"acc_norm_stderr\": 0.02717121368316453\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5820512820512821,\n \"acc_stderr\": 0.025007329882461217,\n \"acc_norm\": 0.5820512820512821,\n \"acc_norm_stderr\": 0.025007329882461217\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.31851851851851853,\n \"acc_stderr\": 0.02840653309060846,\n \"acc_norm\": 0.31851851851851853,\n \"acc_norm_stderr\": 0.02840653309060846\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6050420168067226,\n \"acc_stderr\": 0.03175367846096626,\n \"acc_norm\": 0.6050420168067226,\n \"acc_norm_stderr\": 0.03175367846096626\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7798165137614679,\n \"acc_stderr\": 0.01776597865232753,\n \"acc_norm\": 0.7798165137614679,\n \"acc_norm_stderr\": 0.01776597865232753\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4583333333333333,\n \"acc_stderr\": 0.03398110890294636,\n \"acc_norm\": 0.4583333333333333,\n \"acc_norm_stderr\": 0.03398110890294636\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7696078431372549,\n \"acc_stderr\": 0.029554292605695066,\n \"acc_norm\": 0.7696078431372549,\n \"acc_norm_stderr\": 0.029554292605695066\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7468354430379747,\n \"acc_stderr\": 0.028304657943035307,\n \"acc_norm\": 0.7468354430379747,\n \"acc_norm_stderr\": 0.028304657943035307\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6098654708520179,\n \"acc_stderr\": 0.03273766725459156,\n \"acc_norm\": 0.6098654708520179,\n \"acc_norm_stderr\": 0.03273766725459156\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7022900763358778,\n \"acc_stderr\": 0.04010358942462203,\n \"acc_norm\": 0.7022900763358778,\n \"acc_norm_stderr\": 0.04010358942462203\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.03749492448709695,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.03749492448709695\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.043300437496507416,\n 
\"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.043300437496507416\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7423312883435583,\n \"acc_stderr\": 0.03436150827846917,\n \"acc_norm\": 0.7423312883435583,\n \"acc_norm_stderr\": 0.03436150827846917\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.48214285714285715,\n \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.48214285714285715,\n \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7281553398058253,\n \"acc_stderr\": 0.044052680241409216,\n \"acc_norm\": 0.7281553398058253,\n \"acc_norm_stderr\": 0.044052680241409216\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.02308663508684141,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.02308663508684141\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7484035759897829,\n \"acc_stderr\": 0.015517322365529636,\n \"acc_norm\": 0.7484035759897829,\n \"acc_norm_stderr\": 0.015517322365529636\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.025305258131879702,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.025305258131879702\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.36089385474860336,\n \"acc_stderr\": 0.016062290671110473,\n \"acc_norm\": 0.36089385474860336,\n \"acc_norm_stderr\": 0.016062290671110473\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6535947712418301,\n \"acc_stderr\": 0.02724561304721536,\n \"acc_norm\": 0.6535947712418301,\n \"acc_norm_stderr\": 0.02724561304721536\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6720257234726688,\n \"acc_stderr\": 0.02666441088693761,\n 
\"acc_norm\": 0.6720257234726688,\n \"acc_norm_stderr\": 0.02666441088693761\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6574074074074074,\n \"acc_stderr\": 0.02640614597362568,\n \"acc_norm\": 0.6574074074074074,\n \"acc_norm_stderr\": 0.02640614597362568\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.41843971631205673,\n \"acc_stderr\": 0.02942799403941999,\n \"acc_norm\": 0.41843971631205673,\n \"acc_norm_stderr\": 0.02942799403941999\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.41590612777053454,\n \"acc_stderr\": 0.012588323850313629,\n \"acc_norm\": 0.41590612777053454,\n \"acc_norm_stderr\": 0.012588323850313629\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5882352941176471,\n \"acc_stderr\": 0.029896163033125474,\n \"acc_norm\": 0.5882352941176471,\n \"acc_norm_stderr\": 0.029896163033125474\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6013071895424836,\n \"acc_stderr\": 0.019808281317449848,\n \"acc_norm\": 0.6013071895424836,\n \"acc_norm_stderr\": 0.019808281317449848\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.04494290866252091,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.04494290866252091\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6653061224489796,\n \"acc_stderr\": 0.030209235226242307,\n \"acc_norm\": 0.6653061224489796,\n \"acc_norm_stderr\": 0.030209235226242307\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7960199004975125,\n \"acc_stderr\": 0.02849317624532607,\n \"acc_norm\": 0.7960199004975125,\n \"acc_norm_stderr\": 0.02849317624532607\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.82,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 
0.03892494720807614,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.03892494720807614\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.031885780176863984,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.031885780176863984\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5507955936352509,\n \"mc1_stderr\": 0.017412941986115295,\n \"mc2\": 0.694525955019443,\n \"mc2_stderr\": 0.015330113605051526\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7947908445146015,\n \"acc_stderr\": 0.011350315707462059\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.25473843821076575,\n \"acc_stderr\": 0.012001731232879127\n }\n}\n```", "repo_url": "https://huggingface.co/kwchoi/DPO_mistral_7b_ultra_0124_v1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|arc:challenge|25_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|gsm8k|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hellaswag|10_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-49-50.348304.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-49-50.348304.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-49-50.348304.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-49-50.348304.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-49-50.348304.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-49-50.348304.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T05-49-50.348304.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": 
"2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": 
"2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T05-49-50.348304.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["**/details_harness|winogrande|5_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T05-49-50.348304.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T05_49_50.348304", "path": ["results_2024-01-25T05-49-50.348304.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T05-49-50.348304.parquet"]}]}]}
2024-01-25T05:52:34+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of kwchoi/DPO_mistral_7b_ultra_0124_v1 Dataset automatically created during the evaluation run of model kwchoi/DPO_mistral_7b_ultra_0124_v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T05:49:50.348304 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of kwchoi/DPO_mistral_7b_ultra_0124_v1\n\n\n\nDataset automatically created during the evaluation run of model kwchoi/DPO_mistral_7b_ultra_0124_v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T05:49:50.348304(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of kwchoi/DPO_mistral_7b_ultra_0124_v1\n\n\n\nDataset automatically created during the evaluation run of model kwchoi/DPO_mistral_7b_ultra_0124_v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T05:49:50.348304(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 203, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of kwchoi/DPO_mistral_7b_ultra_0124_v1\n\n\n\nDataset automatically created during the evaluation run of model kwchoi/DPO_mistral_7b_ultra_0124_v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T05:49:50.348304(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
[ -0.0744793489575386, 0.20565597712993622, -0.0058053708635270596, 0.027609679847955704, 0.0779055580496788, -0.029694942757487297, 0.02340068481862545, 0.12519492208957672, 0.01865192875266075, 0.17207129299640656, -0.015034573152661324, 0.09383920580148697, 0.09183687716722488, 0.18519334495067596, 0.010792120359838009, -0.15461862087249756, 0.02203461155295372, -0.07758009433746338, 0.06796605885028839, 0.07444050163030624, 0.08254342526197433, -0.09582169353961945, 0.057206206023693085, -0.016983792185783386, -0.026181332767009735, -0.012064102105796337, -0.08462435007095337, -0.03895634412765503, 0.0798124149441719, 0.0667852833867073, 0.029042130336165428, -0.01534976251423359, 0.0042596980929374695, -0.24152791500091553, 0.021415093913674355, 0.07894354313611984, 0.031503960490226746, 0.06919775903224945, 0.13398794829845428, -0.057448286563158035, 0.0747455433011055, -0.08640691637992859, 0.04748929291963577, 0.03685971349477768, -0.11050941795110703, -0.08206403255462646, -0.16476662456989288, 0.01772262714803219, 0.08388347178697586, 0.055962834507226944, -0.026033727452158928, 0.11875762045383453, -0.00842736754566431, 0.02889067679643631, 0.1480810046195984, -0.13322573900222778, -0.02308260276913643, -0.023339442908763885, 0.02852710708975792, 0.029167471453547478, -0.10709875822067261, -0.02771173045039177, 0.01991971768438816, 0.033954109996557236, 0.0023562225978821516, 0.010692566633224487, -0.008015433326363564, 0.005358789581805468, -0.13070327043533325, -0.07432248443365097, 0.13942177593708038, 0.004570641089230776, -0.052008986473083496, -0.14123116433620453, -0.031460270285606384, -0.041621025651693344, -0.006185134407132864, -0.021535316482186317, 0.020367126911878586, -0.020754346624016762, 0.05632667616009712, 0.0006496125715784729, -0.08878432959318161, -0.011939166113734245, -0.04350990429520607, 0.06510835140943527, 0.018685195595026016, -0.01374561246484518, -0.008216346614062786, 0.12332693487405777, 0.02141685038805008, 
-0.10617562383413315, -0.10777157545089722, -0.03767518326640129, -0.09380979090929031, -0.04974858835339546, 0.0038291194941848516, -0.04790328070521355, 0.03463876619935036, 0.22512279450893402, -0.06786686182022095, 0.010606314986944199, -0.07453612983226776, -0.006882356014102697, 0.11267156898975372, 0.0714319720864296, -0.04144919291138649, -0.049835700541734695, -0.009273339062929153, 0.038766223937273026, 0.018650436773896217, -0.009449491277337074, 0.03256618231534958, 0.06674834340810776, 0.03694742172956467, 0.12480787187814713, 0.12250668555498123, 0.015861012041568756, -0.06760991364717484, -0.027561277151107788, 0.18499931693077087, -0.17660510540008545, -0.006781959906220436, 0.007665563374757767, -0.05638759210705757, -0.08633911609649658, 0.04187662526965141, -0.002781182061880827, -0.056975435465574265, 0.10586372762918472, -0.07040940225124359, -0.051683664321899414, -0.07915285229682922, -0.04392312094569206, 0.05422269552946091, -0.01730731502175331, -0.04118451103568077, -0.07020322233438492, -0.1260310411453247, -0.08728788793087006, -0.0030148273799568415, -0.07570921629667282, -0.012587417848408222, 0.02366754040122032, -0.019229743629693985, -0.016817884519696236, -0.01768575981259346, 0.13068179786205292, -0.0696013793349266, 0.02233041822910309, -0.009328250773251057, -0.00014543159340973943, 0.09016156941652298, 0.04568435996770859, -0.1276407539844513, 0.08198288828134537, -0.07929615676403046, 0.12121320515871048, -0.08885412663221359, 0.0024135562125593424, -0.1283799111843109, 0.0014084083959460258, -0.04801750183105469, 0.003300029318779707, 0.02703094109892845, 0.11555899679660797, -0.2567858397960663, 0.005347708240151405, 0.11404978483915329, -0.09597000479698181, -0.08767718076705933, 0.036261897534132004, -0.03509736433625221, 0.057218268513679504, 0.060787979513406754, 0.08522480726242065, 0.11971566826105118, -0.06793773919343948, -0.14538908004760742, -0.0937701091170311, -0.004496440757066011, 0.12893015146255493, 
0.056483712047338486, -0.07063805311918259, 0.16441763937473297, 0.035587627440690994, -0.03138791769742966, -0.04990709200501442, -0.021496182307600975, -0.052898794412612915, -0.00880616158246994, -0.0504629984498024, -0.09159470349550247, -0.013527050614356995, -0.05373847112059593, -0.021834775805473328, -0.07641094923019409, 0.05068720504641533, 0.09197914600372314, 0.019806934520602226, 0.015529317781329155, -0.07560037076473236, 0.06347303092479706, 0.004268251359462738, 0.017266253009438515, -0.22945347428321838, -0.0960555151104927, 0.040191538631916046, -0.13618388772010803, 0.025131791830062866, -0.00205499236471951, 0.009424323216080666, 0.030007347464561462, 0.0034857974387705326, 0.013558563776314259, 0.0002561442961450666, -0.004462031181901693, -0.028477851301431656, -0.13263750076293945, -0.03278302028775215, -0.07942070811986923, 0.042577289044857025, -0.1122516617178917, -0.02806568332016468, 0.10221841931343079, 0.1800440102815628, 0.024557562544941902, -0.0948479175567627, 0.06781189888715744, 0.005802063271403313, -0.036692261695861816, -0.07141392678022385, 0.00006811478669987991, -0.010840972885489464, 0.05781598761677742, 0.063081756234169, -0.18937507271766663, -0.137535959482193, 0.07163941860198975, 0.1554909199476242, -0.06439439207315445, -0.04644278064370155, -0.09428833425045013, -0.04750540480017662, -0.08865240961313248, -0.05195460468530655, 0.09268275648355484, 0.07806286960840225, 0.02383558638393879, -0.06039484590291977, -0.10093310475349426, -0.01038401946425438, 0.07395495474338531, -0.05688423663377762, 0.08971311897039413, 0.03902938961982727, -0.10860239714384079, 0.09522943198680878, 0.020879225805401802, 0.1380583792924881, 0.10490702837705612, -0.015965409576892853, -0.12539523839950562, -0.010390455834567547, 0.06250379979610443, 0.0326470248401165, 0.08616647124290466, 0.007395673077553511, 0.021426567807793617, 0.0781426951289177, -0.0047639585100114346, 0.03937576338648796, -0.060750968754291534, 
0.04059828072786331, 0.022179974243044853, -0.004396768752485514, 0.013565312139689922, 0.028143273666501045, 0.013305660337209702, 0.07000402361154556, 0.05125603824853897, 0.11232983320951462, -0.026661381125450134, -0.04358744993805885, -0.0836237296462059, 0.14406250417232513, -0.0974811315536499, -0.2235317826271057, -0.12341409176588058, -0.030278973281383514, -0.009633060544729233, -0.012401408515870571, 0.05093396082520485, -0.005864520091563463, -0.09165994822978973, -0.10247057676315308, 0.030545778572559357, 0.0553043894469738, -0.09476712346076965, -0.009799204766750336, 0.026142360642552376, 0.022772623226046562, -0.16494296491146088, 0.035406388342380524, 0.026874059811234474, -0.014015428721904755, -0.01560920849442482, 0.1155458390712738, 0.11993526667356491, 0.06439713388681412, 0.054118186235427856, -0.029622036963701248, -0.0044325413182377815, 0.19065424799919128, -0.09702029824256897, 0.024220159277319908, 0.0948408916592598, -0.04891423135995865, 0.06598691642284393, 0.14317084848880768, -0.00005337765469448641, -0.09048449993133545, 0.03747600317001343, 0.09430820494890213, -0.04746682569384575, -0.26291102170944214, -0.06273192912340164, -0.03610677644610405, -0.007991032674908638, 0.08411398530006409, 0.07763216644525528, -0.030350172892212868, 0.010761610232293606, -0.1155499741435051, -0.031312692910432816, -0.06267421692609787, 0.06286448240280151, 0.043090201914310455, 0.02213100902736187, 0.0499170683324337, -0.04091837257146835, 0.039633553475141525, 0.10921954363584518, 0.01848522387444973, 0.19241885840892792, -0.057127684354782104, 0.16540437936782837, 0.09237449616193771, 0.08111435920000076, -0.029724789783358574, 0.06547904014587402, 0.0038339532911777496, 0.06214470788836479, 0.008631299249827862, -0.1024063304066658, -0.03485898673534393, 0.07826988399028778, -0.0006383882137015462, -0.030935173854231834, 0.042535264045000076, -0.032439637929201126, 0.0628025010228157, 0.19745585322380066, 0.006178122945129871, 
-0.14407388865947723, -0.08118996769189835, 0.04772296920418739, -0.043221209198236465, -0.08729928731918335, -0.03275841474533081, 0.054337307810783386, -0.12527264654636383, 0.02569710649549961, -0.03599928319454193, 0.08562089502811432, -0.12271562218666077, -0.023381667211651802, 0.0034324752632528543, 0.0890699028968811, -0.020055819302797318, 0.08557428419589996, -0.12864267826080322, 0.10290113091468811, -0.003043022006750107, 0.0649074912071228, -0.07280365377664566, 0.06099288910627365, 0.0031005090568214655, -0.08543021231889725, 0.119969941675663, 0.009822574444115162, -0.09259676188230515, -0.0372319296002388, -0.13486967980861664, -0.0010702681029215455, 0.06773974746465683, -0.1315319836139679, 0.1148807480931282, 0.02807861752808094, -0.018562527373433113, -0.03169840946793556, -0.030827011913061142, -0.12495415657758713, -0.19962327182292938, 0.1300477683544159, -0.1283409595489502, 0.08254654705524445, -0.059393297880887985, -0.05243787541985512, -0.033494625240564346, 0.16501881182193756, -0.09802382439374924, -0.08187420666217804, -0.12488985806703568, 0.042864736169576645, 0.17760080099105835, -0.06856332719326019, 0.0612485408782959, -0.03282860293984413, 0.1828070878982544, 0.007425918243825436, -0.06396647542715073, 0.006265812087804079, -0.07520101964473724, -0.17336349189281464, -0.04379170387983322, 0.135773167014122, 0.06482463330030441, -0.00856289453804493, -0.0064025754109025, 0.060048092156648636, 0.005777659360319376, -0.08873289078474045, 0.04164187237620354, 0.07273493707180023, 0.11198929697275162, 0.01939934305846691, -0.037507764995098114, -0.09717147052288055, -0.1239573061466217, -0.0909588560461998, 0.04673294350504875, 0.1559426635503769, -0.047087203711271286, 0.13761235773563385, 0.09439064562320709, -0.09006206691265106, -0.1636684387922287, -0.040287040174007416, 0.03707699850201607, -0.01825038343667984, 0.09662587940692902, -0.18449613451957703, 0.06236514821648598, 0.07677572965621948, -0.0197913721203804, 
0.1659376323223114, -0.217447891831398, -0.15117290616035461, 0.0260965283960104, 0.022211816161870956, -0.17730344831943512, -0.14560876786708832, -0.12399753928184509, -0.002815327374264598, -0.14836549758911133, 0.13713830709457397, 0.004025424364954233, 0.026742687448859215, -0.02035578340291977, 0.05973728373646736, 0.042966701090335846, -0.05372242629528046, 0.1267152726650238, 0.006196802016347647, 0.018546201288700104, -0.09487973153591156, -0.03306886926293373, -0.0013784548500552773, -0.0659448429942131, 0.056643106043338776, 0.03193530812859535, 0.06882277131080627, -0.09823868423700333, -0.027168992906808853, -0.0480562262237072, 0.057242535054683685, -0.05673738941550255, -0.05704323947429657, -0.05341900885105133, 0.07526140660047531, 0.06985931843519211, -0.020463308319449425, 0.03233493119478226, -0.020636996254324913, 0.05275602266192436, 0.1873723864555359, 0.06488068401813507, 0.05268142744898796, -0.08656451851129532, -0.02514394000172615, -0.0028529732953757048, -0.0038727063219994307, -0.1146385669708252, 0.03180072456598282, 0.10356391221284866, 0.05403904244303703, 0.0673988088965416, -0.016051767393946648, -0.1826370358467102, 0.01237628422677517, 0.08524361997842789, -0.10680375248193741, -0.1794823706150055, 0.026167776435613632, 0.11456800252199173, -0.1379878968000412, -0.0498591810464859, 0.09433087706565857, 0.022477561607956886, -0.03360633924603462, 0.014345799572765827, 0.06651709973812103, 0.051854103803634644, 0.1172730103135109, -0.011171567253768444, 0.05055946111679077, -0.07695209234952927, 0.11063589155673981, 0.11325933039188385, -0.0837227925658226, 0.01168101467192173, 0.08308260887861252, -0.0717310830950737, -0.04819810763001442, -0.003205793909728527, 0.03430740535259247, -0.007650368381291628, -0.015063771978020668, 0.0054245260544121265, -0.043386511504650116, 0.07847931981086731, 0.14623205363750458, -0.013460632413625717, 0.058434393256902695, 0.023929007351398468, 0.003863512771204114, -0.050275638699531555, 
0.12203032523393631, 0.051443107426166534, 0.04674878716468811, -0.03388574719429016, 0.02679666131734848, -0.0012412425130605698, -0.01006709411740303, 0.014239141717553139, -0.036517608910799026, -0.029453353956341743, -0.024971889331936836, -0.18051090836524963, 0.013462040573358536, -0.09663070738315582, -0.024141347035765648, -0.020169172435998917, -0.034103963524103165, -0.023445140570402145, 0.03465234115719795, -0.06448410451412201, -0.069737508893013, -0.06312023848295212, 0.09228198230266571, -0.1882891207933426, 0.004426297266036272, 0.07814877480268478, -0.08224483579397202, 0.0860566794872284, 0.04212477058172226, 0.00012197190517326817, 0.003697925480082631, -0.07117242366075516, -0.020644308999180794, -0.02023233100771904, 0.038224801421165466, 0.03523186221718788, -0.16359059512615204, -0.012774961069226265, 0.008257183246314526, -0.06173819676041603, -0.024276334792375565, 0.046086836606264114, -0.15260723233222961, 0.011900108307600021, 0.062372904270887375, -0.0050098346546292305, -0.04053887724876404, 0.029662687331438065, 0.07509949803352356, 0.018284868448972702, 0.10367210954427719, -0.003485264955088496, 0.03789053484797478, -0.16560108959674835, -0.04133929684758186, 0.005124940071254969, -0.004838775377720594, 0.021877815946936607, 0.02871936373412609, 0.04277825728058815, -0.022158948704600334, 0.19157299399375916, -0.059064365923404694, 0.04512161388993263, 0.0491136759519577, -0.008324834518134594, -0.07680918276309967, 0.037584640085697174, 0.07682067155838013, 0.02685961127281189, 0.0070766848511993885, 0.00018716185877565295, -0.036297883838415146, -0.0395694263279438, -0.07679523527622223, 0.09524356573820114, 0.1522066295146942, 0.17506913840770721, -0.01514019351452589, 0.06353554129600525, -0.1642841100692749, -0.07455170899629593, 0.010520449839532375, -0.07058237493038177, 0.037015028297901154, -0.0582069456577301, 0.06241884455084801, 0.08155613392591476, -0.12588882446289062, 0.10388878732919693, -0.06126676872372627, 
-0.029547177255153656, -0.02102857455611229, -0.13649491965770721, -0.027406135573983192, 0.008855834603309631, 0.00933675467967987, -0.0941283330321312, 0.11053233593702316, 0.09506053477525711, -0.0053904238156974316, -0.010441157035529613, 0.11093075573444366, -0.0855276957154274, -0.08208930492401123, 0.0066766636446118355, 0.016625363379716873, 0.01782746985554695, 0.01237835269421339, 0.0547063983976841, 0.017482241615653038, 0.0688830092549324, 0.07236528396606445, 0.07411553710699081, 0.05349263548851013, 0.0284946970641613, -0.017084715887904167, -0.05807120352983475, 0.014750233851373196, -0.006800511386245489, -0.051468245685100555, 0.17735709249973297, 0.05150016397237778, 0.039413925260305405, 0.018763581290841103, 0.2133140116930008, 0.0027841886039823294, -0.055615317076444626, -0.12772172689437866, 0.08781477808952332, 0.026829810813069344, 0.011450941674411297, 0.034004535526037216, -0.14982326328754425, 0.027201401069760323, 0.15181097388267517, 0.06703528761863708, 0.0349712073802948, 0.007134553510695696, 0.03233011066913605, 0.02365206554532051, -0.03731675073504448, 0.02239028736948967, 0.05958221107721329, 0.17388764023780823, -0.03878479078412056, 0.04907780885696411, -0.004342211410403252, -0.024710768833756447, -0.014857867732644081, 0.09387994557619095, -0.04970802739262581, 0.016677025705575943, -0.06859955936670303, 0.08086579293012619, -0.06380493938922882, -0.31501200795173645, 0.0010679815895855427, -0.09765060245990753, -0.14966796338558197, -0.01916416548192501, 0.0352102555334568, -0.021935367956757545, 0.03354683890938759, 0.0483839176595211, -0.018210923299193382, 0.1800195425748825, 0.019094115123152733, -0.0533582866191864, -0.06908747553825378, 0.07422637194395065, -0.04330513998866081, 0.2061067521572113, -0.006311201490461826, 0.03167663887143135, 0.09367716312408447, 0.008222398348152637, -0.19158680737018585, 0.02466273121535778, 0.08633027970790863, -0.07640746235847473, 0.050525546073913574, 0.17933984100818634, 
-0.011487063020467758, 0.07500536739826202, 0.05955716222524643, 0.01494909729808569, 0.04265589639544487, 0.07381678372621536, 0.033812880516052246, -0.10098644345998764, 0.05700366199016571, -0.07995807379484177, 0.13888271152973175, 0.12047417461872101, -0.030185330659151077, 0.017891429364681244, -0.055182263255119324, 0.05985233187675476, -0.01811513677239418, 0.10199855268001556, -0.01255420595407486, -0.17374595999717712, 0.05891463905572891, 0.08662053197622299, 0.06655653566122055, -0.1924222707748413, -0.05595634505152702, 0.09740287810564041, -0.0567333810031414, 0.0032684332691133022, 0.11156975477933884, 0.03028428927063942, 0.021032514050602913, -0.05558128282427788, -0.08759837597608566, 0.007490681950002909, 0.10719556361436844, -0.09163141995668411, -0.0352880135178566 ]
b60d32f777f11f8ca96a5d3ebec350aab8aa1d12
# Dataset Card for Evaluation run of bartowski/internlm2-math-20b-llama <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [bartowski/internlm2-math-20b-llama](https://huggingface.co/bartowski/internlm2-math-20b-llama) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_bartowski__internlm2-math-20b-llama", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T05:51:31.434464](https://huggingface.co/datasets/open-llm-leaderboard/details_bartowski__internlm2-math-20b-llama/blob/main/results_2024-01-25T05-51-31.434464.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6401833310404773, "acc_stderr": 0.03215591321198736, "acc_norm": 0.6526470071520217, "acc_norm_stderr": 0.03295626890086434, "mc1": 0.3623011015911873, "mc1_stderr": 0.016826646897262258, "mc2": 0.5289709889013895, "mc2_stderr": 0.015151269954401329 }, "harness|arc:challenge|25": { "acc": 0.5452218430034129, "acc_stderr": 0.014551507060836355, "acc_norm": 0.5998293515358362, "acc_norm_stderr": 0.014317197787809176 }, "harness|hellaswag|10": { "acc": 0.6307508464449313, "acc_stderr": 0.004816152074023084, "acc_norm": 0.8163712407886875, "acc_norm_stderr": 0.003863898546941598 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6222222222222222, "acc_stderr": 0.04188307537595853, "acc_norm": 0.6222222222222222, "acc_norm_stderr": 0.04188307537595853 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7631578947368421, "acc_stderr": 0.03459777606810534, "acc_norm": 0.7631578947368421, "acc_norm_stderr": 0.03459777606810534 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6867924528301886, "acc_stderr": 0.028544793319055326, "acc_norm": 0.6867924528301886, "acc_norm_stderr": 0.028544793319055326 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7847222222222222, "acc_stderr": 0.03437079344106134, "acc_norm": 0.7847222222222222, "acc_norm_stderr": 0.03437079344106134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956911, "acc_norm": 0.51, "acc_norm_stderr": 
0.05024183937956911 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6647398843930635, "acc_stderr": 0.03599586301247077, "acc_norm": 0.6647398843930635, "acc_norm_stderr": 0.03599586301247077 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.35294117647058826, "acc_stderr": 0.04755129616062947, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.04755129616062947 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.78, "acc_stderr": 0.04163331998932261, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932261 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.676595744680851, "acc_stderr": 0.030579442773610337, "acc_norm": 0.676595744680851, "acc_norm_stderr": 0.030579442773610337 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4473684210526316, "acc_stderr": 0.046774730044911984, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.046774730044911984 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6137931034482759, "acc_stderr": 0.04057324734419036, "acc_norm": 0.6137931034482759, "acc_norm_stderr": 0.04057324734419036 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4947089947089947, "acc_stderr": 0.02574986828855657, "acc_norm": 0.4947089947089947, "acc_norm_stderr": 0.02574986828855657 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677172, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8096774193548387, "acc_stderr": 0.02233170761182307, "acc_norm": 0.8096774193548387, "acc_norm_stderr": 0.02233170761182307 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4827586206896552, "acc_stderr": 0.035158955511656986, "acc_norm": 0.4827586206896552, "acc_norm_stderr": 0.035158955511656986 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7878787878787878, "acc_stderr": 0.031922715695483016, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.031922715695483016 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8282828282828283, "acc_stderr": 0.026869716187429903, "acc_norm": 0.8282828282828283, "acc_norm_stderr": 0.026869716187429903 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8704663212435233, "acc_stderr": 0.024233532297758733, "acc_norm": 0.8704663212435233, "acc_norm_stderr": 0.024233532297758733 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6564102564102564, "acc_stderr": 0.024078696580635477, "acc_norm": 0.6564102564102564, "acc_norm_stderr": 0.024078696580635477 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.4074074074074074, "acc_stderr": 0.029958249250082114, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.029958249250082114 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7142857142857143, "acc_stderr": 0.02934457250063434, "acc_norm": 0.7142857142857143, "acc_norm_stderr": 0.02934457250063434 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.4370860927152318, "acc_stderr": 0.04050035722230636, "acc_norm": 0.4370860927152318, "acc_norm_stderr": 0.04050035722230636 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8366972477064221, "acc_stderr": 0.01584825580650153, "acc_norm": 0.8366972477064221, "acc_norm_stderr": 0.01584825580650153 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5787037037037037, "acc_stderr": 
0.033674621388960775, "acc_norm": 0.5787037037037037, "acc_norm_stderr": 0.033674621388960775 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8284313725490197, "acc_stderr": 0.026460569561240658, "acc_norm": 0.8284313725490197, "acc_norm_stderr": 0.026460569561240658 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.025744902532290913, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.025744902532290913 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6995515695067265, "acc_stderr": 0.03076935200822915, "acc_norm": 0.6995515695067265, "acc_norm_stderr": 0.03076935200822915 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.648854961832061, "acc_stderr": 0.04186445163013751, "acc_norm": 0.648854961832061, "acc_norm_stderr": 0.04186445163013751 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7520661157024794, "acc_stderr": 0.03941897526516303, "acc_norm": 0.7520661157024794, "acc_norm_stderr": 0.03941897526516303 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7407407407407407, "acc_stderr": 0.042365112580946336, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.042365112580946336 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.754601226993865, "acc_stderr": 0.03380939813943354, "acc_norm": 0.754601226993865, "acc_norm_stderr": 0.03380939813943354 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.8058252427184466, "acc_stderr": 0.039166677628225836, "acc_norm": 0.8058252427184466, "acc_norm_stderr": 0.039166677628225836 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8760683760683761, "acc_stderr": 0.02158649400128136, "acc_norm": 0.8760683760683761, "acc_norm_stderr": 0.02158649400128136 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 
0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8084291187739464, "acc_stderr": 0.014072859310451949, "acc_norm": 0.8084291187739464, "acc_norm_stderr": 0.014072859310451949 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6820809248554913, "acc_stderr": 0.025070713719153183, "acc_norm": 0.6820809248554913, "acc_norm_stderr": 0.025070713719153183 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.5407821229050279, "acc_stderr": 0.016666783616525776, "acc_norm": 0.5407821229050279, "acc_norm_stderr": 0.016666783616525776 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.738562091503268, "acc_stderr": 0.025160998214292456, "acc_norm": 0.738562091503268, "acc_norm_stderr": 0.025160998214292456 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6977491961414791, "acc_stderr": 0.02608270069539966, "acc_norm": 0.6977491961414791, "acc_norm_stderr": 0.02608270069539966 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7191358024691358, "acc_stderr": 0.02500646975579922, "acc_norm": 0.7191358024691358, "acc_norm_stderr": 0.02500646975579922 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5, "acc_stderr": 0.029827499313594685, "acc_norm": 0.5, "acc_norm_stderr": 0.029827499313594685 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4556714471968709, "acc_stderr": 0.012719949543032218, "acc_norm": 0.4556714471968709, "acc_norm_stderr": 0.012719949543032218 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6397058823529411, "acc_stderr": 0.029163128570670733, "acc_norm": 0.6397058823529411, "acc_norm_stderr": 0.029163128570670733 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6552287581699346, "acc_stderr": 0.019228322018696647, "acc_norm": 0.6552287581699346, "acc_norm_stderr": 0.019228322018696647 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302505, 
"acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302505 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8258706467661692, "acc_stderr": 0.026814951200421606, "acc_norm": 0.8258706467661692, "acc_norm_stderr": 0.026814951200421606 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.033799766898963086, "acc_norm": 0.87, "acc_norm_stderr": 0.033799766898963086 }, "harness|hendrycksTest-virology|5": { "acc": 0.4759036144578313, "acc_stderr": 0.03887971849597264, "acc_norm": 0.4759036144578313, "acc_norm_stderr": 0.03887971849597264 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7953216374269005, "acc_stderr": 0.030944459778533197, "acc_norm": 0.7953216374269005, "acc_norm_stderr": 0.030944459778533197 }, "harness|truthfulqa:mc|0": { "mc1": 0.3623011015911873, "mc1_stderr": 0.016826646897262258, "mc2": 0.5289709889013895, "mc2_stderr": 0.015151269954401329 }, "harness|winogrande|5": { "acc": 0.7640094711917916, "acc_stderr": 0.011933828850275623 }, "harness|gsm8k|5": { "acc": 0.02122820318423048, "acc_stderr": 0.003970449129848636 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_bartowski__internlm2-math-20b-llama
[ "region:us" ]
2024-01-25T05:53:41+00:00
{"pretty_name": "Evaluation run of bartowski/internlm2-math-20b-llama", "dataset_summary": "Dataset automatically created during the evaluation run of model [bartowski/internlm2-math-20b-llama](https://huggingface.co/bartowski/internlm2-math-20b-llama) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_bartowski__internlm2-math-20b-llama\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T05:51:31.434464](https://huggingface.co/datasets/open-llm-leaderboard/details_bartowski__internlm2-math-20b-llama/blob/main/results_2024-01-25T05-51-31.434464.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6401833310404773,\n \"acc_stderr\": 0.03215591321198736,\n \"acc_norm\": 0.6526470071520217,\n \"acc_norm_stderr\": 0.03295626890086434,\n \"mc1\": 0.3623011015911873,\n \"mc1_stderr\": 0.016826646897262258,\n \"mc2\": 0.5289709889013895,\n \"mc2_stderr\": 0.015151269954401329\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5452218430034129,\n \"acc_stderr\": 0.014551507060836355,\n \"acc_norm\": 0.5998293515358362,\n \"acc_norm_stderr\": 0.014317197787809176\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6307508464449313,\n \"acc_stderr\": 0.004816152074023084,\n \"acc_norm\": 0.8163712407886875,\n \"acc_norm_stderr\": 0.003863898546941598\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252606,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252606\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6222222222222222,\n \"acc_stderr\": 0.04188307537595853,\n \"acc_norm\": 0.6222222222222222,\n \"acc_norm_stderr\": 0.04188307537595853\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7631578947368421,\n \"acc_stderr\": 0.03459777606810534,\n \"acc_norm\": 0.7631578947368421,\n \"acc_norm_stderr\": 0.03459777606810534\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6867924528301886,\n \"acc_stderr\": 0.028544793319055326,\n \"acc_norm\": 0.6867924528301886,\n \"acc_norm_stderr\": 0.028544793319055326\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7847222222222222,\n \"acc_stderr\": 0.03437079344106134,\n \"acc_norm\": 0.7847222222222222,\n \"acc_norm_stderr\": 0.03437079344106134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.5,\n 
\"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6647398843930635,\n \"acc_stderr\": 0.03599586301247077,\n \"acc_norm\": 0.6647398843930635,\n \"acc_norm_stderr\": 0.03599586301247077\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.35294117647058826,\n \"acc_stderr\": 0.04755129616062947,\n \"acc_norm\": 0.35294117647058826,\n \"acc_norm_stderr\": 0.04755129616062947\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932261,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932261\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.676595744680851,\n \"acc_stderr\": 0.030579442773610337,\n \"acc_norm\": 0.676595744680851,\n \"acc_norm_stderr\": 0.030579442773610337\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4473684210526316,\n \"acc_stderr\": 0.046774730044911984,\n \"acc_norm\": 0.4473684210526316,\n \"acc_norm_stderr\": 0.046774730044911984\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6137931034482759,\n \"acc_stderr\": 0.04057324734419036,\n \"acc_norm\": 0.6137931034482759,\n \"acc_norm_stderr\": 0.04057324734419036\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4947089947089947,\n \"acc_stderr\": 0.02574986828855657,\n \"acc_norm\": 0.4947089947089947,\n \"acc_norm_stderr\": 0.02574986828855657\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 
0.04463112720677172,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8096774193548387,\n \"acc_stderr\": 0.02233170761182307,\n \"acc_norm\": 0.8096774193548387,\n \"acc_norm_stderr\": 0.02233170761182307\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4827586206896552,\n \"acc_stderr\": 0.035158955511656986,\n \"acc_norm\": 0.4827586206896552,\n \"acc_norm_stderr\": 0.035158955511656986\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.031922715695483016,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.031922715695483016\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8282828282828283,\n \"acc_stderr\": 0.026869716187429903,\n \"acc_norm\": 0.8282828282828283,\n \"acc_norm_stderr\": 0.026869716187429903\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8704663212435233,\n \"acc_stderr\": 0.024233532297758733,\n \"acc_norm\": 0.8704663212435233,\n \"acc_norm_stderr\": 0.024233532297758733\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6564102564102564,\n \"acc_stderr\": 0.024078696580635477,\n \"acc_norm\": 0.6564102564102564,\n \"acc_norm_stderr\": 0.024078696580635477\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.4074074074074074,\n \"acc_stderr\": 0.029958249250082114,\n \"acc_norm\": 0.4074074074074074,\n \"acc_norm_stderr\": 0.029958249250082114\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.02934457250063434,\n \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.02934457250063434\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.4370860927152318,\n \"acc_stderr\": 0.04050035722230636,\n \"acc_norm\": 0.4370860927152318,\n \"acc_norm_stderr\": 0.04050035722230636\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8366972477064221,\n \"acc_stderr\": 0.01584825580650153,\n \"acc_norm\": 0.8366972477064221,\n \"acc_norm_stderr\": 0.01584825580650153\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5787037037037037,\n \"acc_stderr\": 0.033674621388960775,\n \"acc_norm\": 0.5787037037037037,\n \"acc_norm_stderr\": 0.033674621388960775\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8284313725490197,\n \"acc_stderr\": 0.026460569561240658,\n \"acc_norm\": 0.8284313725490197,\n \"acc_norm_stderr\": 0.026460569561240658\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.025744902532290913,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.025744902532290913\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6995515695067265,\n \"acc_stderr\": 0.03076935200822915,\n \"acc_norm\": 0.6995515695067265,\n \"acc_norm_stderr\": 0.03076935200822915\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.648854961832061,\n \"acc_stderr\": 0.04186445163013751,\n \"acc_norm\": 0.648854961832061,\n \"acc_norm_stderr\": 0.04186445163013751\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7520661157024794,\n \"acc_stderr\": 0.03941897526516303,\n \"acc_norm\": 0.7520661157024794,\n \"acc_norm_stderr\": 0.03941897526516303\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.042365112580946336,\n 
\"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.754601226993865,\n \"acc_stderr\": 0.03380939813943354,\n \"acc_norm\": 0.754601226993865,\n \"acc_norm_stderr\": 0.03380939813943354\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8058252427184466,\n \"acc_stderr\": 0.039166677628225836,\n \"acc_norm\": 0.8058252427184466,\n \"acc_norm_stderr\": 0.039166677628225836\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n \"acc_stderr\": 0.02158649400128136,\n \"acc_norm\": 0.8760683760683761,\n \"acc_norm_stderr\": 0.02158649400128136\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8084291187739464,\n \"acc_stderr\": 0.014072859310451949,\n \"acc_norm\": 0.8084291187739464,\n \"acc_norm_stderr\": 0.014072859310451949\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6820809248554913,\n \"acc_stderr\": 0.025070713719153183,\n \"acc_norm\": 0.6820809248554913,\n \"acc_norm_stderr\": 0.025070713719153183\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.5407821229050279,\n \"acc_stderr\": 0.016666783616525776,\n \"acc_norm\": 0.5407821229050279,\n \"acc_norm_stderr\": 0.016666783616525776\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.738562091503268,\n \"acc_stderr\": 0.025160998214292456,\n \"acc_norm\": 0.738562091503268,\n \"acc_norm_stderr\": 0.025160998214292456\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6977491961414791,\n \"acc_stderr\": 0.02608270069539966,\n 
\"acc_norm\": 0.6977491961414791,\n \"acc_norm_stderr\": 0.02608270069539966\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7191358024691358,\n \"acc_stderr\": 0.02500646975579922,\n \"acc_norm\": 0.7191358024691358,\n \"acc_norm_stderr\": 0.02500646975579922\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.029827499313594685,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.029827499313594685\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4556714471968709,\n \"acc_stderr\": 0.012719949543032218,\n \"acc_norm\": 0.4556714471968709,\n \"acc_norm_stderr\": 0.012719949543032218\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6397058823529411,\n \"acc_stderr\": 0.029163128570670733,\n \"acc_norm\": 0.6397058823529411,\n \"acc_norm_stderr\": 0.029163128570670733\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6552287581699346,\n \"acc_stderr\": 0.019228322018696647,\n \"acc_norm\": 0.6552287581699346,\n \"acc_norm_stderr\": 0.019228322018696647\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302505,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302505\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8258706467661692,\n \"acc_stderr\": 0.026814951200421606,\n \"acc_norm\": 0.8258706467661692,\n \"acc_norm_stderr\": 0.026814951200421606\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4759036144578313,\n \"acc_stderr\": 0.03887971849597264,\n 
\"acc_norm\": 0.4759036144578313,\n \"acc_norm_stderr\": 0.03887971849597264\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7953216374269005,\n \"acc_stderr\": 0.030944459778533197,\n \"acc_norm\": 0.7953216374269005,\n \"acc_norm_stderr\": 0.030944459778533197\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3623011015911873,\n \"mc1_stderr\": 0.016826646897262258,\n \"mc2\": 0.5289709889013895,\n \"mc2_stderr\": 0.015151269954401329\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7640094711917916,\n \"acc_stderr\": 0.011933828850275623\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.02122820318423048,\n \"acc_stderr\": 0.003970449129848636\n }\n}\n```", "repo_url": "https://huggingface.co/bartowski/internlm2-math-20b-llama", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|arc:challenge|25_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|gsm8k|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hellaswag|10_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-51-31.434464.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-51-31.434464.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-51-31.434464.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-51-31.434464.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-51-31.434464.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-51-31.434464.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T05-51-31.434464.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": 
"2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": 
"2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T05-51-31.434464.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["**/details_harness|winogrande|5_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T05-51-31.434464.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T05_51_31.434464", "path": ["results_2024-01-25T05-51-31.434464.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T05-51-31.434464.parquet"]}]}]}
2024-01-25T05:54:02+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of bartowski/internlm2-math-20b-llama Dataset automatically created during the evaluation run of model bartowski/internlm2-math-20b-llama on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T05:51:31.434464(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of bartowski/internlm2-math-20b-llama\n\n\n\nDataset automatically created during the evaluation run of model bartowski/internlm2-math-20b-llama on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T05:51:31.434464(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of bartowski/internlm2-math-20b-llama\n\n\n\nDataset automatically created during the evaluation run of model bartowski/internlm2-math-20b-llama on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T05:51:31.434464(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 189, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of bartowski/internlm2-math-20b-llama\n\n\n\nDataset automatically created during the evaluation run of model bartowski/internlm2-math-20b-llama on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T05:51:31.434464(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.05440312623977661, 0.20908071100711823, -0.005312792025506496, 0.03878088667988777, 0.08562680333852768, -0.009813816286623478, 0.04399270564317703, 0.10898754000663757, 0.018128536641597748, 0.18397384881973267, -0.014837388880550861, 0.10545627772808075, 0.06869445741176605, 0.11654943972826004, 0.011466672644019127, -0.14472195506095886, 0.02366112545132637, -0.08227622509002686, 0.06902636587619781, 0.08853313326835632, 0.07336989790201187, -0.08275608718395233, 0.06400629878044128, -0.029522517696022987, 0.024162206798791885, -0.018918195739388466, -0.07045537233352661, -0.03291630372405052, 0.08574575185775757, 0.12055899947881699, 0.03428605571389198, -0.025482013821601868, 0.012282505631446838, -0.25914955139160156, 0.01501508615911007, 0.09596859663724899, -0.010544011369347572, 0.038283947855234146, 0.13310083746910095, -0.07542087137699127, 0.09049302339553833, -0.039502762258052826, 0.06716175377368927, 0.05235324054956436, -0.12043546140193939, -0.14737915992736816, -0.14742964506149292, -0.0051379017531871796, 0.0780971348285675, 0.04599892348051071, -0.02934372052550316, 0.13569098711013794, -0.057224661111831665, 0.05422027409076691, 0.13713867962360382, -0.11439809203147888, -0.023275192826986313, 0.05240138992667198, 0.01188335008919239, 0.0643252283334732, -0.09287872910499573, -0.014875577762722969, 0.03468666970729828, 0.048916615545749664, 0.028518762439489365, 0.008486911654472351, 0.00037190550938248634, 0.02091808430850506, -0.14510393142700195, -0.13582053780555725, 0.13643625378608704, 0.01061897911131382, -0.04697307199239731, -0.18480375409126282, -0.009443435817956924, 0.010786809958517551, 0.005880543030798435, -0.01021597906947136, -0.0025117057375609875, -0.0280299112200737, 0.09119410067796707, -0.009721740148961544, -0.09708112478256226, -0.02372116409242153, -0.004823506344109774, 0.10512520372867584, 0.013776147738099098, -0.012333828024566174, 0.012885335832834244, 0.10437510907649994, -0.015197229571640491, 
-0.07995012402534485, -0.07147584855556488, -0.048451945185661316, -0.11358001083135605, -0.03904350847005844, 0.005152488127350807, -0.07739026844501495, 0.027116402983665466, 0.23267459869384766, -0.013533897697925568, 0.03077048808336258, -0.09399334341287613, 0.012816027738153934, 0.11186259239912033, 0.06504365801811218, -0.07350780069828033, -0.0481652095913887, -0.03669915720820427, 0.024940185248851776, 0.037465084344148636, -0.02175077050924301, 0.009362945333123207, 0.06314153969287872, 0.028404127806425095, 0.11403414607048035, 0.12056638300418854, 0.03643179312348366, -0.06807710230350494, -0.02838713675737381, 0.2201032042503357, -0.13446184992790222, -0.013005419634282589, 0.017677823081612587, -0.046638961881399155, -0.11146248131990433, 0.06302327662706375, 0.0012273453176021576, -0.047103263437747955, 0.16237568855285645, -0.044803686439991, -0.06982548534870148, -0.07350622117519379, -0.06734024733304977, 0.060499124228954315, 0.03242804855108261, -0.04355764761567116, -0.07881778478622437, -0.07327024638652802, -0.0901448130607605, 0.029455535113811493, -0.06030323728919029, -0.01418178342282772, 0.01988808438181877, -0.011721016839146614, -0.006968630477786064, -0.012969990260899067, 0.10380984842777252, -0.05305739864706993, 0.03954432159662247, 0.0014907156582921743, 0.018787024542689323, 0.08959464728832245, 0.04334789514541626, -0.1183273047208786, 0.07219511270523071, -0.15240389108657837, 0.0812913328409195, -0.11447183787822723, -0.004618350882083178, -0.1291137933731079, -0.0198320671916008, -0.015094959177076817, 0.04260705038905144, -0.01674330234527588, 0.09488829970359802, -0.20915275812149048, 0.0016869258834049106, 0.14115513861179352, -0.1127740889787674, -0.08080621063709259, 0.08778920769691467, -0.048158854246139526, 0.05474467575550079, 0.04230363294482231, 0.12431551516056061, 0.10828399658203125, -0.06494860351085663, -0.08900125324726105, -0.08082380890846252, -0.024500073865056038, 0.1525537073612213, 0.06998783349990845, 
-0.07810566574335098, 0.09375857561826706, 0.04784783348441124, -0.017762385308742523, -0.08793671429157257, -0.004592974204570055, -0.06287317723035812, -0.01039944402873516, -0.06579182296991348, -0.04403527081012726, -0.009205124340951443, -0.07622039318084717, -0.01578623615205288, -0.09251140058040619, -0.01575050875544548, 0.08778916299343109, -0.019940664991736412, 0.009594587609171867, -0.06302139908075333, 0.03781285136938095, 0.011190577410161495, 0.011837508529424667, -0.2097688615322113, -0.09346763044595718, 0.024853400886058807, -0.18437424302101135, 0.05017228424549103, 0.029441319406032562, 0.01680017076432705, 0.0389542356133461, -0.015002254396677017, 0.03293273597955704, 0.03663221746683121, -0.01277436688542366, -0.01606394723057747, -0.14556559920310974, -0.05674616992473602, -0.08532920479774475, 0.06134313717484474, -0.14661459624767303, -0.02058982104063034, 0.08184798061847687, 0.1613515019416809, 0.025697244331240654, -0.08869561553001404, 0.06623433530330658, 0.016420934349298477, -0.03715456277132034, -0.05165528506040573, -0.003604547120630741, -0.02739303931593895, 0.02465018630027771, 0.025807060301303864, -0.20800776779651642, -0.09624441713094711, 0.07454858720302582, 0.1259879767894745, -0.07236075401306152, -0.10861742496490479, -0.05646058917045593, -0.058098748326301575, -0.08144921809434891, -0.0694897398352623, 0.07253581285476685, 0.08331079035997391, 0.044419288635253906, -0.05891137570142746, -0.05346702039241791, 0.01563410833477974, 0.05743724852800369, -0.06744910031557083, 0.11706943809986115, 0.07023102045059204, -0.0731712132692337, 0.10375897586345673, -0.02881946787238121, 0.10014098882675171, 0.06532251834869385, 0.0338946208357811, -0.09415769577026367, -0.0055915070697665215, 0.06349223107099533, 0.05027502030134201, 0.08055667579174042, -0.04518021643161774, 0.03234735131263733, 0.0821639820933342, -0.0026179682463407516, 0.029042018577456474, -0.061259061098098755, 0.02641967684030533, 0.04269900918006897, 
0.007595641538500786, 0.014691349118947983, 0.011020412668585777, 0.0032147204037755728, 0.07765158265829086, 0.035428646951913834, 0.10091507434844971, -0.012499647215008736, -0.05233647674322128, -0.09824195504188538, 0.1466415524482727, -0.07859160006046295, -0.2904403805732727, -0.1644880473613739, -0.03071838803589344, -0.036588817834854126, -0.016358958557248116, 0.07259848713874817, -0.014917364344000816, -0.10787709057331085, -0.10277281701564789, 0.05694844573736191, 0.023315878584980965, -0.12439584732055664, -0.06819506734609604, 0.05548156797885895, -0.0010396633297204971, -0.16210266947746277, 0.04070896655321121, 0.04399009048938751, -0.03455007076263428, -0.0013428758829832077, 0.08515509963035583, 0.12201730161905289, 0.07498347014188766, 0.07210655510425568, -0.021076545119285583, -0.014895033091306686, 0.18259228765964508, -0.11822963505983353, 0.025816217064857483, 0.10100673139095306, -0.052218999713659286, 0.06818868964910507, 0.17421817779541016, 0.01886863261461258, -0.09179215878248215, 0.0532928891479969, 0.09153562784194946, -0.07084809243679047, -0.238253653049469, -0.11630412191152573, -0.031700510531663895, 0.009929172694683075, 0.11307866871356964, 0.05799480900168419, 0.022913159802556038, 0.009134124964475632, -0.11936773359775543, -0.028356462717056274, -0.05461816117167473, 0.078250452876091, 0.08396336436271667, 0.0027280873619019985, 0.04701708257198334, -0.03763803094625473, 0.02204723097383976, 0.11905260384082794, 0.028670595958828926, 0.15282344818115234, -0.03849228471517563, 0.1723650097846985, 0.08054153621196747, 0.07989740371704102, -0.03281533345580101, 0.04149026423692703, -0.008459726348519325, 0.060597293078899384, -0.018583934754133224, -0.10917402803897858, -0.060317862778902054, 0.11204415559768677, 0.027910204604268074, -0.0764542669057846, 0.017212368547916412, -0.08396612107753754, 0.036818284541368484, 0.18589076399803162, -0.02933588996529579, -0.13466927409172058, -0.06687320023775101, 0.05049112066626549, 
-0.035222359001636505, -0.08526566624641418, -0.007286118343472481, 0.0786476880311966, -0.1379750370979309, 0.011786315590143204, -0.0444067046046257, 0.0816035121679306, -0.14024372398853302, -0.017338469624519348, -0.005759294144809246, 0.04989122599363327, -0.008488258346915245, 0.11533184349536896, -0.1618689000606537, 0.11043893545866013, 0.0015391218475997448, 0.019487734884023666, -0.11470282077789307, 0.040388189256191254, -0.03793994337320328, -0.06916569918394089, 0.1312478482723236, -0.005043945740908384, -0.10329504311084747, -0.06350354850292206, -0.10058260709047318, -0.010909074917435646, 0.07754155993461609, -0.10661278665065765, 0.10579223930835724, 0.02938411384820938, -0.02234046906232834, -0.02994965948164463, -0.022351788356900215, -0.09262587130069733, -0.22922006249427795, 0.1172163337469101, -0.10051435977220535, 0.06882455199956894, -0.06318598240613937, -0.04932231456041336, -0.028887465596199036, 0.15543542802333832, -0.08427651226520538, -0.05604231357574463, -0.12099919468164444, -0.0009849760681390762, 0.1836719512939453, -0.047730375081300735, 0.06861712038516998, -0.03667420148849487, 0.17573124170303345, -0.005097705405205488, -0.04331177845597267, 0.007939784787595272, -0.09133334457874298, -0.18108513951301575, -0.04841797053813934, 0.12129797041416168, 0.0708736777305603, 0.0171370767056942, -0.008131129667162895, 0.031484201550483704, 0.019761769101023674, -0.10114166140556335, 0.032816626131534576, 0.11622057855129242, 0.11087721586227417, 0.04073600098490715, -0.008522038348019123, -0.09630755335092545, -0.10799345374107361, -0.10521730780601501, 0.04009736329317093, 0.15242823958396912, -0.06958393007516861, 0.16910485923290253, 0.13574719429016113, -0.08130457997322083, -0.17726092040538788, -0.05500680208206177, 0.022270694375038147, -0.021840790286660194, 0.12499967217445374, -0.19816455245018005, 0.06252607703208923, 0.0640566423535347, -0.026850692927837372, 0.12005335092544556, -0.24812033772468567, 
-0.13010793924331665, 0.018954668194055557, 0.04385971650481224, -0.22813068330287933, -0.1785297691822052, -0.11954814195632935, -0.021469775587320328, -0.15725760161876678, 0.12255767732858658, 0.029575848951935768, 0.03034684807062149, -0.016734866425395012, 0.09770756959915161, 0.05938265845179558, -0.06836649775505066, 0.13289132714271545, -0.006757579743862152, 0.017973463982343674, -0.10620225965976715, -0.038041483610868454, -0.012313226237893105, -0.04422985017299652, 0.07760867476463318, 0.00975668802857399, 0.05631560832262039, -0.10097195208072662, -0.03810921683907509, -0.06293106079101562, 0.042894501239061356, -0.07500296831130981, -0.054767388850450516, -0.06987134367227554, 0.08432887494564056, 0.08222434669733047, -0.014247076585888863, 0.020414924249053, -0.046110183000564575, 0.044899459928274155, 0.21444740891456604, 0.11182282865047455, 0.04943337291479111, -0.12885233759880066, -0.030861083418130875, -0.014942082576453686, 0.004131353925913572, -0.13420207798480988, 0.03970613330602646, 0.0969652310013771, 0.0531565360724926, 0.07938054203987122, -0.027098100632429123, -0.17066079378128052, 0.0028118761256337166, 0.08169013261795044, -0.110185407102108, -0.19486059248447418, 0.032108575105667114, 0.15206927061080933, -0.15438246726989746, -0.05946562439203262, 0.08953551948070526, 0.013747705146670341, -0.0297105573117733, 0.0008534149965271354, 0.07649386674165726, 0.06096523255109787, 0.10770603269338608, 0.020543983206152916, 0.05538947135210037, -0.07072249799966812, 0.08205165714025497, 0.13307052850723267, -0.1373199075460434, 0.020284093916416168, 0.04561857879161835, -0.05777908116579056, -0.06847792863845825, -0.01397424004971981, -0.016847215592861176, 0.009721110574901104, -0.035957854241132736, 0.017063163220882416, -0.01744869165122509, 0.05310206860303879, 0.12699463963508606, 0.00777028501033783, 0.04076709970831871, 0.0183416698127985, -0.0009582326747477055, -0.06836213916540146, 0.09768230468034744, 0.040470898151397705, 
0.0424874983727932, -0.0414273664355278, 0.028776422142982483, 0.003602289129048586, -0.039514318108558655, 0.017213858664035797, -0.040108077228069305, -0.078325554728508, -0.0015319832600653172, -0.15669375658035278, 0.05347292870283127, -0.08871188759803772, 0.007948148995637894, -0.0031300901900976896, -0.03185005858540535, -0.002631774637848139, 0.005884058773517609, -0.07172980904579163, -0.04601137340068817, -0.04761333018541336, 0.1158054769039154, -0.1912795901298523, -0.003647069213911891, 0.0861785039305687, -0.06868845224380493, 0.0740957111120224, -0.003933960106223822, -0.022041290998458862, 0.015095505863428116, -0.08050204068422318, 0.00372914201579988, -0.028897512704133987, 0.046194590628147125, 0.010951444506645203, -0.146872416138649, -0.018210645765066147, 0.00009529059752821922, -0.07060573995113373, -0.006612775847315788, 0.03169483691453934, -0.16558970510959625, 0.03849000483751297, 0.08309878408908844, -0.04617157578468323, -0.04788120090961456, 0.046011149883270264, 0.05664963647723198, -0.012558949179947376, 0.10019664466381073, -0.0015180041082203388, 0.05038510262966156, -0.14151063561439514, -0.041117340326309204, 0.00046951393596827984, 0.0022486590314656496, 0.018253576010465622, 0.024128679186105728, 0.034569233655929565, 0.005460187792778015, 0.21189482510089874, 0.0033504145685583353, 0.04591086879372597, 0.02953876182436943, -0.018930787220597267, -0.014663921669125557, 0.030764980241656303, 0.027583133429288864, 0.004224416799843311, 0.013271672651171684, 0.015351872891187668, -0.02750120684504509, -0.06370262801647186, -0.03193449229001999, 0.06464938819408417, 0.1627732515335083, 0.16090027987957, -0.04119759798049927, 0.05626338720321655, -0.15994152426719666, -0.0742138922214508, 0.027600126340985298, -0.030698951333761215, 0.05846501886844635, -0.07613813877105713, 0.06447778642177582, 0.06963460147380829, -0.10505926609039307, 0.14776790142059326, -0.06866122782230377, -0.026348015293478966, -0.020070604979991913, 
-0.16828684508800507, -0.039045803248882294, 0.003960433416068554, 0.005293943919241428, -0.09531223773956299, 0.12192589789628983, 0.13066303730010986, -0.01036603283137083, -0.004864389076828957, 0.09438717365264893, -0.051720473915338516, -0.05589413270354271, -0.006499244831502438, 0.004667838104069233, 0.011931603774428368, 0.007849583402276039, 0.06918737292289734, 0.012416900135576725, 0.05471969395875931, 0.06682849675416946, 0.10044781863689423, 0.03308241814374924, 0.0028270985931158066, -0.04065811261534691, -0.06659549474716187, -0.0005020701792091131, -0.011946635320782661, -0.04200031980872154, 0.21427130699157715, 0.04468994960188866, 0.01606297492980957, 0.006360779982060194, 0.22828084230422974, -0.012475153431296349, -0.05160252004861832, -0.1273096352815628, 0.14288443326950073, -0.004061178304255009, 0.019412556663155556, 0.01156807690858841, -0.12575659155845642, 0.040269073098897934, 0.17172706127166748, 0.10018192231655121, 0.0544436015188694, 0.009534701704978943, 0.036129966378211975, 0.026114363223314285, -0.01579807884991169, 0.045807238668203354, 0.023190218955278397, 0.2331361472606659, -0.0539989173412323, 0.0589958056807518, -0.0070369429886341095, -0.010370096191763878, -0.009832952171564102, 0.09483062475919724, -0.0331386998295784, 0.011737431399524212, -0.07481810450553894, 0.10660426318645477, -0.04809786379337311, -0.2669820189476013, -0.015550579875707626, -0.08992811292409897, -0.13108618557453156, -0.01628824695944786, 0.0412176251411438, -0.002897395985201001, 0.0539909265935421, 0.0296630896627903, -0.037764713168144226, 0.18198001384735107, 0.008753347210586071, -0.07917433977127075, -0.0746423602104187, 0.06576034426689148, -0.03208877518773079, 0.264546275138855, -0.0023319274187088013, 0.07170417904853821, 0.09906010329723358, -0.01682833395898342, -0.14292529225349426, 0.007861862890422344, 0.09815052151679993, -0.07323619723320007, 0.05301641672849655, 0.17465892434120178, -0.02311624586582184, 0.15450754761695862, 
0.03715994954109192, -0.04115058481693268, 0.06747999787330627, 0.07338325679302216, 0.04141246899962425, -0.09320585429668427, 0.06590013206005096, -0.08379043638706207, 0.13735441863536835, 0.1193883940577507, -0.02388145588338375, -0.008535167202353477, -0.046574339270591736, 0.0672130435705185, -0.023417802527546883, 0.1326066553592682, -0.011882597580552101, -0.16319842636585236, 0.03746183216571808, 0.018953703343868256, 0.04994921013712883, -0.2532411813735962, -0.061955735087394714, 0.10995990037918091, -0.03895937651395798, 0.023595310747623444, 0.08060891181230545, 0.04396619647741318, 0.009577494114637375, -0.06501016020774841, -0.10080686211585999, 0.0016648482996970415, 0.13018956780433655, -0.08543093502521515, -0.03548309952020645 ]
64b2a0245215796fe4715d30b80726237001890d
# Dataset Card for Evaluation run of SJ-Donald/SOLAR-10.7B-slerp <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [SJ-Donald/SOLAR-10.7B-slerp](https://huggingface.co/SJ-Donald/SOLAR-10.7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_SJ-Donald__SOLAR-10.7B-slerp", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T05:52:30.041619](https://huggingface.co/datasets/open-llm-leaderboard/details_SJ-Donald__SOLAR-10.7B-slerp/blob/main/results_2024-01-25T05-52-30.041619.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6693576990621962, "acc_stderr": 0.031454814037401475, "acc_norm": 0.6709568764499055, "acc_norm_stderr": 0.03209310283459356, "mc1": 0.5091799265605875, "mc1_stderr": 0.017500550724819756, "mc2": 0.674246091155489, "mc2_stderr": 0.014911205444372602 }, "harness|arc:challenge|25": { "acc": 0.6501706484641638, "acc_stderr": 0.013936809212158296, "acc_norm": 0.681740614334471, "acc_norm_stderr": 0.013611993916971453 }, "harness|hellaswag|10": { "acc": 0.682832105158335, "acc_stderr": 0.004644223294727723, "acc_norm": 0.8691495717984465, "acc_norm_stderr": 0.003365474860676742 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5703703703703704, "acc_stderr": 0.042763494943765995, "acc_norm": 0.5703703703703704, "acc_norm_stderr": 0.042763494943765995 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7631578947368421, "acc_stderr": 0.03459777606810535, "acc_norm": 0.7631578947368421, "acc_norm_stderr": 0.03459777606810535 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.72, "acc_stderr": 0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7018867924528301, "acc_stderr": 0.02815283794249387, "acc_norm": 0.7018867924528301, "acc_norm_stderr": 0.02815283794249387 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7569444444444444, "acc_stderr": 0.0358687928008034, "acc_norm": 0.7569444444444444, "acc_norm_stderr": 0.0358687928008034 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 
0.05024183937956912 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6994219653179191, "acc_stderr": 0.0349610148119118, "acc_norm": 0.6994219653179191, "acc_norm_stderr": 0.0349610148119118 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.39215686274509803, "acc_stderr": 0.048580835742663454, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.048580835742663454 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6212765957446809, "acc_stderr": 0.03170995606040655, "acc_norm": 0.6212765957446809, "acc_norm_stderr": 0.03170995606040655 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6344827586206897, "acc_stderr": 0.04013124195424386, "acc_norm": 0.6344827586206897, "acc_norm_stderr": 0.04013124195424386 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.48148148148148145, "acc_stderr": 0.02573364199183898, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.02573364199183898 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4444444444444444, "acc_stderr": 0.044444444444444495, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.044444444444444495 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8064516129032258, "acc_stderr": 0.022475258525536057, "acc_norm": 0.8064516129032258, "acc_norm_stderr": 0.022475258525536057 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 
0.47783251231527096, "acc_stderr": 0.03514528562175008, "acc_norm": 0.47783251231527096, "acc_norm_stderr": 0.03514528562175008 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8181818181818182, "acc_stderr": 0.03011768892950357, "acc_norm": 0.8181818181818182, "acc_norm_stderr": 0.03011768892950357 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8686868686868687, "acc_stderr": 0.024063156416822516, "acc_norm": 0.8686868686868687, "acc_norm_stderr": 0.024063156416822516 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9067357512953368, "acc_stderr": 0.02098685459328972, "acc_norm": 0.9067357512953368, "acc_norm_stderr": 0.02098685459328972 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.676923076923077, "acc_stderr": 0.023710888501970562, "acc_norm": 0.676923076923077, "acc_norm_stderr": 0.023710888501970562 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37407407407407406, "acc_stderr": 0.029502861128955286, "acc_norm": 0.37407407407407406, "acc_norm_stderr": 0.029502861128955286 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7226890756302521, "acc_stderr": 0.02907937453948001, "acc_norm": 0.7226890756302521, "acc_norm_stderr": 0.02907937453948001 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8550458715596331, "acc_stderr": 0.01509421569970048, "acc_norm": 0.8550458715596331, "acc_norm_stderr": 0.01509421569970048 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6111111111111112, "acc_stderr": 0.033247089118091176, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 
0.033247089118091176 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8676470588235294, "acc_stderr": 0.023784297520918856, "acc_norm": 0.8676470588235294, "acc_norm_stderr": 0.023784297520918856 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.869198312236287, "acc_stderr": 0.02194876605947076, "acc_norm": 0.869198312236287, "acc_norm_stderr": 0.02194876605947076 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7174887892376681, "acc_stderr": 0.03021683101150878, "acc_norm": 0.7174887892376681, "acc_norm_stderr": 0.03021683101150878 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7404580152671756, "acc_stderr": 0.03844876139785271, "acc_norm": 0.7404580152671756, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8099173553719008, "acc_stderr": 0.03581796951709282, "acc_norm": 0.8099173553719008, "acc_norm_stderr": 0.03581796951709282 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.040191074725573483, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.040191074725573483 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7423312883435583, "acc_stderr": 0.03436150827846917, "acc_norm": 0.7423312883435583, "acc_norm_stderr": 0.03436150827846917 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.49107142857142855, "acc_stderr": 0.04745033255489123, "acc_norm": 0.49107142857142855, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.8155339805825242, "acc_stderr": 0.03840423627288276, "acc_norm": 0.8155339805825242, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507333, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.79, "acc_stderr": 0.040936018074033256, "acc_norm": 0.79, "acc_norm_stderr": 0.040936018074033256 }, 
"harness|hendrycksTest-miscellaneous|5": { "acc": 0.8160919540229885, "acc_stderr": 0.013853724170922533, "acc_norm": 0.8160919540229885, "acc_norm_stderr": 0.013853724170922533 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7630057803468208, "acc_stderr": 0.02289408248992599, "acc_norm": 0.7630057803468208, "acc_norm_stderr": 0.02289408248992599 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.43687150837988825, "acc_stderr": 0.01658868086453063, "acc_norm": 0.43687150837988825, "acc_norm_stderr": 0.01658868086453063 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7647058823529411, "acc_stderr": 0.0242886194660461, "acc_norm": 0.7647058823529411, "acc_norm_stderr": 0.0242886194660461 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7363344051446945, "acc_stderr": 0.02502553850053234, "acc_norm": 0.7363344051446945, "acc_norm_stderr": 0.02502553850053234 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7777777777777778, "acc_stderr": 0.023132376234543343, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.023132376234543343 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5390070921985816, "acc_stderr": 0.02973659252642444, "acc_norm": 0.5390070921985816, "acc_norm_stderr": 0.02973659252642444 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5026075619295959, "acc_stderr": 0.012770062445433166, "acc_norm": 0.5026075619295959, "acc_norm_stderr": 0.012770062445433166 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7352941176470589, "acc_stderr": 0.026799562024887667, "acc_norm": 0.7352941176470589, "acc_norm_stderr": 0.026799562024887667 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7075163398692811, "acc_stderr": 0.018403415710109797, "acc_norm": 0.7075163398692811, "acc_norm_stderr": 0.018403415710109797 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7090909090909091, "acc_stderr": 0.04350271442923243, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 
0.04350271442923243 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7795918367346939, "acc_stderr": 0.02653704531214529, "acc_norm": 0.7795918367346939, "acc_norm_stderr": 0.02653704531214529 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.02587064676616913, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.02587064676616913 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.89, "acc_stderr": 0.03144660377352203, "acc_norm": 0.89, "acc_norm_stderr": 0.03144660377352203 }, "harness|hendrycksTest-virology|5": { "acc": 0.572289156626506, "acc_stderr": 0.03851597683718533, "acc_norm": 0.572289156626506, "acc_norm_stderr": 0.03851597683718533 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8070175438596491, "acc_stderr": 0.030267457554898458, "acc_norm": 0.8070175438596491, "acc_norm_stderr": 0.030267457554898458 }, "harness|truthfulqa:mc|0": { "mc1": 0.5091799265605875, "mc1_stderr": 0.017500550724819756, "mc2": 0.674246091155489, "mc2_stderr": 0.014911205444372602 }, "harness|winogrande|5": { "acc": 0.840568271507498, "acc_stderr": 0.010288617479454764 }, "harness|gsm8k|5": { "acc": 0.621683093252464, "acc_stderr": 0.013358407831777112 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_SJ-Donald__SOLAR-10.7B-slerp
[ "region:us" ]
2024-01-25T05:54:46+00:00
{"pretty_name": "Evaluation run of SJ-Donald/SOLAR-10.7B-slerp", "dataset_summary": "Dataset automatically created during the evaluation run of model [SJ-Donald/SOLAR-10.7B-slerp](https://huggingface.co/SJ-Donald/SOLAR-10.7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_SJ-Donald__SOLAR-10.7B-slerp\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T05:52:30.041619](https://huggingface.co/datasets/open-llm-leaderboard/details_SJ-Donald__SOLAR-10.7B-slerp/blob/main/results_2024-01-25T05-52-30.041619.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6693576990621962,\n \"acc_stderr\": 0.031454814037401475,\n \"acc_norm\": 0.6709568764499055,\n \"acc_norm_stderr\": 0.03209310283459356,\n \"mc1\": 0.5091799265605875,\n \"mc1_stderr\": 0.017500550724819756,\n \"mc2\": 0.674246091155489,\n \"mc2_stderr\": 0.014911205444372602\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6501706484641638,\n \"acc_stderr\": 0.013936809212158296,\n \"acc_norm\": 0.681740614334471,\n \"acc_norm_stderr\": 0.013611993916971453\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.682832105158335,\n \"acc_stderr\": 0.004644223294727723,\n \"acc_norm\": 0.8691495717984465,\n \"acc_norm_stderr\": 0.003365474860676742\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5703703703703704,\n \"acc_stderr\": 0.042763494943765995,\n \"acc_norm\": 0.5703703703703704,\n \"acc_norm_stderr\": 0.042763494943765995\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7631578947368421,\n \"acc_stderr\": 0.03459777606810535,\n \"acc_norm\": 0.7631578947368421,\n \"acc_norm_stderr\": 0.03459777606810535\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7018867924528301,\n \"acc_stderr\": 0.02815283794249387,\n \"acc_norm\": 0.7018867924528301,\n \"acc_norm_stderr\": 0.02815283794249387\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 0.0358687928008034,\n \"acc_norm\": 0.7569444444444444,\n \"acc_norm_stderr\": 0.0358687928008034\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.43,\n 
\"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6994219653179191,\n \"acc_stderr\": 0.0349610148119118,\n \"acc_norm\": 0.6994219653179191,\n \"acc_norm_stderr\": 0.0349610148119118\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.39215686274509803,\n \"acc_stderr\": 0.048580835742663454,\n \"acc_norm\": 0.39215686274509803,\n \"acc_norm_stderr\": 0.048580835742663454\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6212765957446809,\n \"acc_stderr\": 0.03170995606040655,\n \"acc_norm\": 0.6212765957446809,\n \"acc_norm_stderr\": 0.03170995606040655\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6344827586206897,\n \"acc_stderr\": 0.04013124195424386,\n \"acc_norm\": 0.6344827586206897,\n \"acc_norm_stderr\": 0.04013124195424386\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.02573364199183898,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.02573364199183898\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.044444444444444495,\n \"acc_norm\": 
0.4444444444444444,\n \"acc_norm_stderr\": 0.044444444444444495\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8064516129032258,\n \"acc_stderr\": 0.022475258525536057,\n \"acc_norm\": 0.8064516129032258,\n \"acc_norm_stderr\": 0.022475258525536057\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.47783251231527096,\n \"acc_stderr\": 0.03514528562175008,\n \"acc_norm\": 0.47783251231527096,\n \"acc_norm_stderr\": 0.03514528562175008\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8181818181818182,\n \"acc_stderr\": 0.03011768892950357,\n \"acc_norm\": 0.8181818181818182,\n \"acc_norm_stderr\": 0.03011768892950357\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8686868686868687,\n \"acc_stderr\": 0.024063156416822516,\n \"acc_norm\": 0.8686868686868687,\n \"acc_norm_stderr\": 0.024063156416822516\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9067357512953368,\n \"acc_stderr\": 0.02098685459328972,\n \"acc_norm\": 0.9067357512953368,\n \"acc_norm_stderr\": 0.02098685459328972\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.676923076923077,\n \"acc_stderr\": 0.023710888501970562,\n \"acc_norm\": 0.676923076923077,\n \"acc_norm_stderr\": 0.023710888501970562\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37407407407407406,\n \"acc_stderr\": 0.029502861128955286,\n \"acc_norm\": 0.37407407407407406,\n \"acc_norm_stderr\": 0.029502861128955286\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 
0.7226890756302521,\n \"acc_stderr\": 0.02907937453948001,\n \"acc_norm\": 0.7226890756302521,\n \"acc_norm_stderr\": 0.02907937453948001\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8550458715596331,\n \"acc_stderr\": 0.01509421569970048,\n \"acc_norm\": 0.8550458715596331,\n \"acc_norm_stderr\": 0.01509421569970048\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6111111111111112,\n \"acc_stderr\": 0.033247089118091176,\n \"acc_norm\": 0.6111111111111112,\n \"acc_norm_stderr\": 0.033247089118091176\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8676470588235294,\n \"acc_stderr\": 0.023784297520918856,\n \"acc_norm\": 0.8676470588235294,\n \"acc_norm_stderr\": 0.023784297520918856\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.869198312236287,\n \"acc_stderr\": 0.02194876605947076,\n \"acc_norm\": 0.869198312236287,\n \"acc_norm_stderr\": 0.02194876605947076\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7174887892376681,\n \"acc_stderr\": 0.03021683101150878,\n \"acc_norm\": 0.7174887892376681,\n \"acc_norm_stderr\": 0.03021683101150878\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7404580152671756,\n \"acc_stderr\": 0.03844876139785271,\n \"acc_norm\": 0.7404580152671756,\n \"acc_norm_stderr\": 0.03844876139785271\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8099173553719008,\n \"acc_stderr\": 0.03581796951709282,\n \"acc_norm\": 0.8099173553719008,\n \"acc_norm_stderr\": 0.03581796951709282\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.040191074725573483,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 
0.040191074725573483\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7423312883435583,\n \"acc_stderr\": 0.03436150827846917,\n \"acc_norm\": 0.7423312883435583,\n \"acc_norm_stderr\": 0.03436150827846917\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.49107142857142855,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.49107142857142855,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8155339805825242,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.8155339805825242,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8160919540229885,\n \"acc_stderr\": 0.013853724170922533,\n \"acc_norm\": 0.8160919540229885,\n \"acc_norm_stderr\": 0.013853724170922533\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7630057803468208,\n \"acc_stderr\": 0.02289408248992599,\n \"acc_norm\": 0.7630057803468208,\n \"acc_norm_stderr\": 0.02289408248992599\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.43687150837988825,\n \"acc_stderr\": 0.01658868086453063,\n \"acc_norm\": 0.43687150837988825,\n \"acc_norm_stderr\": 0.01658868086453063\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7647058823529411,\n \"acc_stderr\": 0.0242886194660461,\n \"acc_norm\": 0.7647058823529411,\n \"acc_norm_stderr\": 0.0242886194660461\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7363344051446945,\n \"acc_stderr\": 0.02502553850053234,\n \"acc_norm\": 0.7363344051446945,\n \"acc_norm_stderr\": 
0.02502553850053234\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.023132376234543343,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.023132376234543343\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5390070921985816,\n \"acc_stderr\": 0.02973659252642444,\n \"acc_norm\": 0.5390070921985816,\n \"acc_norm_stderr\": 0.02973659252642444\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5026075619295959,\n \"acc_stderr\": 0.012770062445433166,\n \"acc_norm\": 0.5026075619295959,\n \"acc_norm_stderr\": 0.012770062445433166\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7352941176470589,\n \"acc_stderr\": 0.026799562024887667,\n \"acc_norm\": 0.7352941176470589,\n \"acc_norm_stderr\": 0.026799562024887667\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7075163398692811,\n \"acc_stderr\": 0.018403415710109797,\n \"acc_norm\": 0.7075163398692811,\n \"acc_norm_stderr\": 0.018403415710109797\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7090909090909091,\n \"acc_stderr\": 0.04350271442923243,\n \"acc_norm\": 0.7090909090909091,\n \"acc_norm_stderr\": 0.04350271442923243\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7795918367346939,\n \"acc_stderr\": 0.02653704531214529,\n \"acc_norm\": 0.7795918367346939,\n \"acc_norm_stderr\": 0.02653704531214529\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.02587064676616913,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.02587064676616913\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.89,\n \"acc_stderr\": 0.03144660377352203,\n \"acc_norm\": 0.89,\n \"acc_norm_stderr\": 0.03144660377352203\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.572289156626506,\n \"acc_stderr\": 0.03851597683718533,\n \"acc_norm\": 0.572289156626506,\n 
\"acc_norm_stderr\": 0.03851597683718533\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8070175438596491,\n \"acc_stderr\": 0.030267457554898458,\n \"acc_norm\": 0.8070175438596491,\n \"acc_norm_stderr\": 0.030267457554898458\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5091799265605875,\n \"mc1_stderr\": 0.017500550724819756,\n \"mc2\": 0.674246091155489,\n \"mc2_stderr\": 0.014911205444372602\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.840568271507498,\n \"acc_stderr\": 0.010288617479454764\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.621683093252464,\n \"acc_stderr\": 0.013358407831777112\n }\n}\n```", "repo_url": "https://huggingface.co/SJ-Donald/SOLAR-10.7B-slerp", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|arc:challenge|25_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|gsm8k|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hellaswag|10_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-52-30.041619.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-52-30.041619.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-52-30.041619.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-52-30.041619.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-52-30.041619.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-52-30.041619.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T05-52-30.041619.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T05-52-30.041619.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["**/details_harness|winogrande|5_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T05-52-30.041619.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T05_52_30.041619", "path": ["results_2024-01-25T05-52-30.041619.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T05-52-30.041619.parquet"]}]}]}
2024-01-25T05:55:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of SJ-Donald/SOLAR-10.7B-slerp Dataset automatically created during the evaluation run of model SJ-Donald/SOLAR-10.7B-slerp on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T05:52:30.041619 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of SJ-Donald/SOLAR-10.7B-slerp\n\n\n\nDataset automatically created during the evaluation run of model SJ-Donald/SOLAR-10.7B-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T05:52:30.041619(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of SJ-Donald/SOLAR-10.7B-slerp\n\n\n\nDataset automatically created during the evaluation run of model SJ-Donald/SOLAR-10.7B-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T05:52:30.041619(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 189, 69, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of SJ-Donald/SOLAR-10.7B-slerp\n\n\n\nDataset automatically created during the evaluation run of model SJ-Donald/SOLAR-10.7B-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T05:52:30.041619(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
[ -0.044657282531261444, 0.1983528584241867, -0.00442908750846982, 0.02475624904036522, 0.08793478459119797, -0.016238467767834663, 0.03500082343816757, 0.09966298937797546, -0.0025653864722698927, 0.18706591427326202, -0.02070326916873455, 0.1135413646697998, 0.07793030142784119, 0.12631957232952118, 0.011479810811579227, -0.14146627485752106, 0.02590692788362503, -0.07913895696401596, 0.06091299653053284, 0.07468032091856003, 0.0809764489531517, -0.08818645030260086, 0.0665704682469368, -0.04766292870044708, 0.009182384237647057, -0.015275062061846256, -0.08874700218439102, -0.04727715998888016, 0.09808111190795898, 0.10438115149736404, 0.02567334659397602, 0.00847077276557684, 0.022512398660182953, -0.2518356740474701, 0.012680605985224247, 0.08638598769903183, -0.005470482632517815, 0.04981466382741928, 0.12954989075660706, -0.057722028344869614, 0.06888501346111298, -0.052123334258794785, 0.07375156134366989, 0.04988379776477814, -0.11686427891254425, -0.1542164385318756, -0.1451731026172638, 0.00827233586460352, 0.06864019483327866, 0.02820257842540741, -0.0195749681442976, 0.16459694504737854, -0.035771988332271576, 0.05078091844916344, 0.12964124977588654, -0.09903617203235626, -0.02137245051562786, 0.07919828593730927, 0.017468664795160294, 0.06903418153524399, -0.07281029224395752, -0.00571707496419549, 0.03522755205631256, 0.04775158688426018, 0.03129034489393234, 0.01140410266816616, -0.03559023141860962, 0.02868814393877983, -0.14600175619125366, -0.1296621412038803, 0.18908976018428802, 0.017729420214891434, -0.04124141111969948, -0.19488267600536346, -0.015049473382532597, 0.04331722855567932, 0.0055252802558243275, -0.038195423781871796, -0.0013246124144643545, -0.026943299919366837, 0.08175872266292572, -0.010210760869085789, -0.09540978819131851, -0.031316470354795456, 0.01488750521093607, 0.08530744910240173, 0.02498650550842285, -0.009170121513307095, 0.002577505772933364, 0.1152610182762146, 0.016426783055067062, -0.08573790639638901, 
-0.06596381217241287, -0.06262703239917755, -0.10684427618980408, -0.040224138647317886, 0.01790221408009529, -0.07526678591966629, 0.0365661159157753, 0.23393726348876953, -0.015954645350575447, 0.022767798975110054, -0.10268818587064743, 0.0202874056994915, 0.1249089390039444, 0.051032181829214096, -0.06041745841503143, -0.043233469128608704, -0.031029213219881058, 0.01863616518676281, 0.039901234209537506, -0.02356017753481865, 0.017328133806586266, 0.07390060275793076, 0.031071553006768227, 0.10780008882284164, 0.13651445508003235, 0.041648250073194504, -0.05965758487582207, -0.02579384483397007, 0.27309632301330566, -0.132623091340065, -0.027456413954496384, 0.010715377517044544, -0.039875105023384094, -0.11652971059083939, 0.0793411135673523, 0.001832778682000935, -0.04431833326816559, 0.13377143442630768, -0.0312018021941185, -0.09070692211389542, -0.06939075142145157, -0.053123265504837036, 0.053651805967092514, 0.02252640388906002, -0.0326337032020092, -0.09293501824140549, -0.0803510770201683, -0.07820935547351837, 0.02264963462948799, -0.06867048144340515, -0.039363645017147064, 0.03721654787659645, -0.007556128315627575, -0.01700197532773018, -0.02152647078037262, 0.10901755839586258, -0.049191609025001526, 0.03494403511285782, 0.010416349396109581, 0.019468192011117935, 0.08892284333705902, 0.0447358600795269, -0.12007103860378265, 0.08230463415384293, -0.14038564264774323, 0.09286089241504669, -0.12191160768270493, 0.0021964660845696926, -0.13654066622257233, -0.01664857752621174, -0.02476665750145912, 0.02002740651369095, -0.015745332464575768, 0.10379059612751007, -0.21338051557540894, 0.009045821614563465, 0.1295088678598404, -0.119603231549263, -0.1026461124420166, 0.07649201154708862, -0.037872787564992905, 0.06205795332789421, 0.039010122418403625, 0.10919605195522308, 0.09056361019611359, -0.04851449653506279, -0.10018253326416016, -0.0840473473072052, -0.027940750122070312, 0.15323221683502197, 0.07561784982681274, -0.07776157557964325, 
0.09017565101385117, 0.048921942710876465, -0.02446979284286499, -0.08616016805171967, -0.007562460843473673, -0.05794858559966087, -0.01681119203567505, -0.06354483962059021, -0.05671831592917442, 0.00007957794150570408, -0.07578125596046448, -0.010956807062029839, -0.08199051022529602, -0.011785607784986496, 0.09512747079133987, -0.0321107879281044, 0.007504185661673546, -0.05960730090737343, 0.045594342052936554, 0.009926775470376015, 0.01631164737045765, -0.21229980885982513, -0.09725827723741531, 0.041615184396505356, -0.1555548459291458, 0.03254750743508339, 0.028540179133415222, 0.008219251409173012, 0.04930276796221733, -0.00601450027897954, 0.02680128999054432, 0.031931206583976746, -0.0001581985707161948, -0.00749367568641901, -0.14231804013252258, -0.06418735533952713, -0.07705531269311905, 0.07350651919841766, -0.15529249608516693, -0.009226336143910885, 0.0828571543097496, 0.1653563231229782, 0.005334085319191217, -0.08420035243034363, 0.07710383087396622, -0.0030869399197399616, -0.03766927868127823, -0.05741291493177414, 0.00444871187210083, -0.022595658898353577, 0.027301562950015068, 0.040351130068302155, -0.20479832589626312, -0.15673670172691345, 0.07035359740257263, 0.11062101274728775, -0.06239530071616173, -0.088507741689682, -0.06927454471588135, -0.06862388551235199, -0.07370257377624512, -0.05834943801164627, 0.04374457895755768, 0.07838808745145798, 0.04073610529303551, -0.06093508005142212, -0.06800170987844467, 0.018774500116705894, 0.06388183683156967, -0.0702856257557869, 0.10205399245023727, 0.08483374118804932, -0.07739034295082092, 0.09340820461511612, -0.028208941221237183, 0.13429193198680878, 0.053891632705926895, 0.026223475113511086, -0.0943346694111824, 0.0037086959928274155, 0.043866563588380814, 0.0481535829603672, 0.08519448339939117, -0.02735317125916481, 0.03634022921323776, 0.08786298334598541, -0.006103104911744595, 0.03493570536375046, -0.052711427211761475, 0.03064154088497162, 0.04595267400145531, 
0.010493599809706211, 0.030295103788375854, 0.017272314056754112, -0.004080225247889757, 0.05997958406805992, 0.02796892635524273, 0.09741411358118057, -0.016885899007320404, -0.05531969293951988, -0.10281362384557724, 0.13767851889133453, -0.07356266677379608, -0.2693723142147064, -0.1621313840150833, -0.030158666893839836, -0.03191625326871872, -0.01745983399450779, 0.06728453189134598, -0.0025672530755400658, -0.10328462719917297, -0.10340166091918945, 0.05061640217900276, 0.01658465340733528, -0.1220669373869896, -0.04369190335273743, 0.05477152764797211, 0.0019190505845472217, -0.16130617260932922, 0.044031236320734024, 0.051082734018564224, -0.0537981241941452, 0.007712508086115122, 0.08519149571657181, 0.13450761139392853, 0.06937931478023529, 0.06669851392507553, -0.02939312346279621, -0.01174420677125454, 0.17976796627044678, -0.10720346868038177, 0.030141839757561684, 0.12365641444921494, -0.05885811150074005, 0.05712210014462471, 0.1570785939693451, 0.01608252339065075, -0.09674645215272903, 0.050939276814460754, 0.06920967251062393, -0.07581573724746704, -0.24854198098182678, -0.12070183455944061, -0.010938374325633049, 0.009762600995600224, 0.11492866277694702, 0.06453432887792587, 0.02342255599796772, 0.030842572450637817, -0.1199236512184143, -0.029073696583509445, -0.046129260212183, 0.08314075320959091, 0.054038189351558685, 0.0013767138589173555, 0.04459180310368538, -0.041350800544023514, 0.03330894559621811, 0.11550948023796082, 0.021176042035222054, 0.1388719528913498, -0.03397456929087639, 0.16766583919525146, 0.08624079078435898, 0.10300402343273163, -0.04952782765030861, 0.044541310518980026, -0.0008008548174984753, 0.07127203792333603, -0.009655280038714409, -0.10722368210554123, -0.051433853805065155, 0.09979090094566345, 0.005047557409852743, -0.07639320194721222, 0.013101756572723389, -0.07056022435426712, 0.030608613044023514, 0.19399923086166382, -0.03427281975746155, -0.14771488308906555, -0.07123439759016037, 0.06154153123497963, 
-0.0211153794080019, -0.08037784695625305, -0.014843644574284554, 0.08351674675941467, -0.14678342640399933, 0.0343313068151474, -0.03443154692649841, 0.0745520368218422, -0.1367592215538025, -0.012058771215379238, -0.008259913884103298, 0.036631032824516296, 0.009774313308298588, 0.11757898330688477, -0.13501854240894318, 0.11053725332021713, -0.0002523801231291145, 0.0023652941454201937, -0.11405472457408905, 0.05110950395464897, -0.0608334019780159, -0.034989241510629654, 0.13146327435970306, -0.0160143431276083, -0.11345229297876358, -0.04856247454881668, -0.1120348572731018, -0.005711735226213932, 0.06206902861595154, -0.13238340616226196, 0.10114692896604538, 0.037675321102142334, -0.01620572619140148, -0.02715391293168068, -0.010236991569399834, -0.12173905223608017, -0.2304680347442627, 0.11415807902812958, -0.10707932710647583, 0.07474849373102188, -0.0700010135769844, -0.040629852563142776, -0.06891332566738129, 0.15172110497951508, -0.1003803238272667, -0.047129612416028976, -0.11662784218788147, 0.022366156801581383, 0.17367328703403473, -0.04391021654009819, 0.050138093531131744, -0.032731276005506516, 0.17388062179088593, 0.0014459923841059208, -0.05788755789399147, -0.006846235133707523, -0.10366098582744598, -0.19207054376602173, -0.03787633776664734, 0.12153884023427963, 0.07217839360237122, 0.016812749207019806, 0.0001329452934442088, 0.03439430147409439, 0.01934647746384144, -0.09317126870155334, 0.03933563828468323, 0.11562596261501312, 0.12977857887744904, 0.030382046476006508, -0.010433518327772617, -0.08107941597700119, -0.10818157345056534, -0.11119920760393143, 0.05116282403469086, 0.1578303724527359, -0.07466694712638855, 0.17273090779781342, 0.13222739100456238, -0.08834663033485413, -0.1811164766550064, -0.06501130759716034, 0.017127899453043938, -0.024372439831495285, 0.14367003738880157, -0.19151072204113007, 0.05943206325173378, 0.053085096180438995, -0.020408030599355698, 0.04888816922903061, -0.23243434727191925, 
-0.13762159645557404, 0.030352381989359856, 0.03315652161836624, -0.2310100644826889, -0.18005691468715668, -0.11315932869911194, -0.030970508232712746, -0.172340527176857, 0.13700227439403534, 0.013170010410249233, 0.0071574426256120205, -0.014099765568971634, 0.07101365178823471, 0.06173480674624443, -0.06364060193300247, 0.14278541505336761, -0.012511088512837887, 0.013161052949726582, -0.11161401867866516, -0.026753218844532967, 0.00396352494135499, -0.03919323533773422, 0.08821176737546921, 0.042322203516960144, 0.059669286012649536, -0.08998201042413712, -0.030021334066987038, -0.060970332473516464, 0.03653942793607712, -0.07011370360851288, -0.055247094482183456, -0.06647490710020065, 0.08348912000656128, 0.09031257033348083, -0.012856997549533844, 0.04884691536426544, -0.028571689501404762, 0.05398222804069519, 0.21987506747245789, 0.1206587627530098, 0.04844895005226135, -0.10644786059856415, -0.03080882877111435, -0.01355340052396059, -0.006525536999106407, -0.1475520133972168, 0.04075498878955841, 0.07485394179821014, 0.03844977170228958, 0.06447993218898773, -0.021676458418369293, -0.18417447805404663, -0.008856210857629776, 0.07958041876554489, -0.11832810193300247, -0.222420334815979, 0.026406697928905487, 0.1548628807067871, -0.15075436234474182, -0.0544336773455143, 0.07852768898010254, 0.0027396671939641237, -0.03509888797998428, -0.0004494808381423354, 0.0803229883313179, 0.05455419421195984, 0.09765847772359848, 0.012386586517095566, 0.06412745267152786, -0.07467003911733627, 0.10503721982240677, 0.16123244166374207, -0.10738323628902435, 0.014024530537426472, 0.03658754751086235, -0.06418147683143616, -0.06294897943735123, 0.0088870320469141, -0.03274398297071457, 0.03078526444733143, -0.053502146154642105, 0.020138075575232506, -0.006480837240815163, 0.03947921097278595, 0.09626774489879608, 0.007535350043326616, 0.03876937925815582, 0.032923322170972824, -0.003859204240143299, -0.07708831876516342, 0.08138195425271988, 0.013083363883197308, 
0.04593460634350777, -0.046216342598199844, 0.03607840836048126, 0.011654824949800968, -0.00505983317270875, 0.012539094313979149, -0.043865982443094254, -0.059357862919569016, -0.0010813844855874777, -0.13976645469665527, 0.03812675178050995, -0.08128481358289719, -0.004633211996406317, -0.002627885900437832, -0.02827277220785618, -0.0017371049616485834, 0.007250003516674042, -0.06513815373182297, -0.04622316360473633, -0.04364636540412903, 0.13176466524600983, -0.1861373782157898, -0.003307334380224347, 0.09039383381605148, -0.06588773429393768, 0.07745938748121262, -0.00593193806707859, -0.0196186825633049, 0.021397629752755165, -0.07458633184432983, -0.010664614848792553, -0.02369900979101658, 0.059635065495967865, 0.006475046277046204, -0.1534166932106018, -0.03146534785628319, 0.0037652982864528894, -0.0989527553319931, -0.013438839465379715, 0.04633361101150513, -0.15351715683937073, 0.023906948044896126, 0.06916776299476624, -0.03270728141069412, -0.04819416627287865, 0.04262808710336685, 0.02321362867951393, 0.007797515485435724, 0.08979852497577667, -0.004006974399089813, 0.03267411142587662, -0.15546733140945435, -0.05347349867224693, -0.005002179648727179, -0.008780304342508316, 0.017481805756688118, 0.030336491763591766, 0.040723782032728195, 0.0052578141912817955, 0.22369666397571564, -0.011913116089999676, 0.06499635428190231, 0.0332254134118557, 0.004573598969727755, -0.02766902931034565, 0.02721269428730011, 0.01749027520418167, 0.015975646674633026, 0.0340154655277729, 0.03625936433672905, -0.024066755548119545, -0.0598968006670475, -0.04044723138213158, 0.05672352761030197, 0.1518627256155014, 0.14800718426704407, -0.05986969545483589, 0.08146873116493225, -0.17078161239624023, -0.0369003526866436, 0.03180119767785072, -0.030724091455340385, 0.05890090391039848, -0.07828133553266525, 0.04078112170100212, 0.06154759228229523, -0.10016217827796936, 0.14465788006782532, -0.07127267867326736, -0.025276247411966324, -0.04188181459903717, 
-0.1553252786397934, -0.03389263525605202, -0.003364530624821782, 0.006531265564262867, -0.08559642732143402, 0.09775164723396301, 0.14178283512592316, -0.01912495493888855, -0.007195995654910803, 0.07162664085626602, -0.04818945750594139, -0.04317401349544525, -0.03774164617061615, -0.005696450360119343, 0.007935583591461182, -0.018230104818940163, 0.08108572661876678, 0.015237302519381046, 0.0820719301700592, 0.06694955378770828, 0.09328093379735947, 0.04515066742897034, 0.016956977546215057, -0.035390835255384445, -0.07699462026357651, 0.002836450934410095, -0.014812755398452282, -0.05316444858908653, 0.1884470134973526, 0.053801268339157104, 0.027170415967702866, 0.006720112171024084, 0.22465838491916656, 0.0003445518377702683, -0.058914363384246826, -0.13108180463314056, 0.08145498484373093, -0.012203528545796871, 0.014982587657868862, 0.037489838898181915, -0.13383738696575165, 0.018976112827658653, 0.16082361340522766, 0.1219838485121727, 0.06776461005210876, 0.01410702895373106, 0.030048588290810585, 0.023266438394784927, -0.027446873486042023, 0.03849461302161217, 0.03512909635901451, 0.18227413296699524, -0.05673062056303024, 0.06392224133014679, -0.006597847677767277, 0.00008440392412012443, -0.01833277940750122, 0.08178156614303589, -0.04245956614613533, 0.005369078833609819, -0.04668509587645531, 0.10596694052219391, -0.03431834280490875, -0.26723578572273254, -0.02680051699280739, -0.10227032005786896, -0.12209994345903397, -0.029354477301239967, 0.026612747460603714, -0.03316753730177879, 0.03837592527270317, 0.029847757890820503, -0.020196206867694855, 0.19293755292892456, 0.011540890671312809, -0.0764755830168724, -0.04922032728791237, 0.07270795106887817, -0.02169523574411869, 0.2541322708129883, -0.007889739237725735, 0.08337279409170151, 0.08697809278964996, -0.020240865647792816, -0.1551559716463089, -0.0038857620675116777, 0.10564898699522018, -0.05060470104217529, 0.05976598709821701, 0.17306262254714966, -0.027949459850788116, 
0.1447376161813736, 0.04182242229580879, -0.045155689120292664, 0.05767576023936272, 0.0752984806895256, 0.052013833075761795, -0.08659642934799194, 0.07323889434337616, -0.07687824219465256, 0.14047937095165253, 0.10224846750497818, -0.0341726578772068, -0.004634261596947908, -0.0639885887503624, 0.0634256973862648, -0.032363273203372955, 0.1294747143983841, -0.007351785898208618, -0.156924307346344, 0.028745688498020172, 0.0033518755808472633, 0.05738100782036781, -0.21877320110797882, -0.06059146299958229, 0.12245265394449234, -0.05254406854510307, 0.010669304057955742, 0.08713549375534058, 0.055926211178302765, 0.006023187655955553, -0.07609487324953079, -0.07726790010929108, -0.008352578617632389, 0.12621274590492249, -0.10061348974704742, -0.03915707767009735 ]
06461da056605f03742571a15ad91eaede744b75
# Dataset Card for Evaluation run of SJ-Donald/SJ-SOLAR-10.7b-DPO <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [SJ-Donald/SJ-SOLAR-10.7b-DPO](https://huggingface.co/SJ-Donald/SJ-SOLAR-10.7b-DPO) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_SJ-Donald__SJ-SOLAR-10.7b-DPO", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T05:53:20.241050](https://huggingface.co/datasets/open-llm-leaderboard/details_SJ-Donald__SJ-SOLAR-10.7b-DPO/blob/main/results_2024-01-25T05-53-20.241050.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6694201238242145, "acc_stderr": 0.03145425883361444, "acc_norm": 0.6709590638465028, "acc_norm_stderr": 0.03209348907350449, "mc1": 0.5152998776009792, "mc1_stderr": 0.0174953044731879, "mc2": 0.6774426022949598, "mc2_stderr": 0.014870145786575549 }, "harness|arc:challenge|25": { "acc": 0.6535836177474402, "acc_stderr": 0.013905011180063232, "acc_norm": 0.6825938566552902, "acc_norm_stderr": 0.013602239088038167 }, "harness|hellaswag|10": { "acc": 0.6835291774546903, "acc_stderr": 0.0046414842733351, "acc_norm": 0.8695478988249352, "acc_norm_stderr": 0.003361118395452385 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5703703703703704, "acc_stderr": 0.042763494943765995, "acc_norm": 0.5703703703703704, "acc_norm_stderr": 0.042763494943765995 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7631578947368421, "acc_stderr": 0.03459777606810535, "acc_norm": 0.7631578947368421, "acc_norm_stderr": 0.03459777606810535 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.72, "acc_stderr": 0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7018867924528301, "acc_stderr": 0.02815283794249387, "acc_norm": 0.7018867924528301, "acc_norm_stderr": 0.02815283794249387 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7569444444444444, "acc_stderr": 0.0358687928008034, "acc_norm": 0.7569444444444444, "acc_norm_stderr": 0.0358687928008034 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 
0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6994219653179191, "acc_stderr": 0.0349610148119118, "acc_norm": 0.6994219653179191, "acc_norm_stderr": 0.0349610148119118 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.39215686274509803, "acc_stderr": 0.048580835742663454, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.048580835742663454 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6212765957446809, "acc_stderr": 0.03170995606040655, "acc_norm": 0.6212765957446809, "acc_norm_stderr": 0.03170995606040655 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6275862068965518, "acc_stderr": 0.04028731532947559, "acc_norm": 0.6275862068965518, "acc_norm_stderr": 0.04028731532947559 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.48148148148148145, "acc_stderr": 0.02573364199183898, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.02573364199183898 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4444444444444444, "acc_stderr": 0.044444444444444495, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.044444444444444495 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8064516129032258, "acc_stderr": 0.022475258525536057, "acc_norm": 0.8064516129032258, "acc_norm_stderr": 0.022475258525536057 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 
0.4876847290640394, "acc_stderr": 0.035169204442208966, "acc_norm": 0.4876847290640394, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.806060606060606, "acc_stderr": 0.03087414513656209, "acc_norm": 0.806060606060606, "acc_norm_stderr": 0.03087414513656209 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8686868686868687, "acc_stderr": 0.024063156416822516, "acc_norm": 0.8686868686868687, "acc_norm_stderr": 0.024063156416822516 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9067357512953368, "acc_stderr": 0.02098685459328972, "acc_norm": 0.9067357512953368, "acc_norm_stderr": 0.02098685459328972 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.676923076923077, "acc_stderr": 0.023710888501970565, "acc_norm": 0.676923076923077, "acc_norm_stderr": 0.023710888501970565 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37777777777777777, "acc_stderr": 0.029560707392465715, "acc_norm": 0.37777777777777777, "acc_norm_stderr": 0.029560707392465715 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7184873949579832, "acc_stderr": 0.029213549414372174, "acc_norm": 0.7184873949579832, "acc_norm_stderr": 0.029213549414372174 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8568807339449541, "acc_stderr": 0.01501446249716859, "acc_norm": 0.8568807339449541, "acc_norm_stderr": 0.01501446249716859 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6111111111111112, "acc_stderr": 0.033247089118091176, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 
0.033247089118091176 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8676470588235294, "acc_stderr": 0.023784297520918856, "acc_norm": 0.8676470588235294, "acc_norm_stderr": 0.023784297520918856 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8776371308016878, "acc_stderr": 0.021331741829746786, "acc_norm": 0.8776371308016878, "acc_norm_stderr": 0.021331741829746786 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7174887892376681, "acc_stderr": 0.03021683101150878, "acc_norm": 0.7174887892376681, "acc_norm_stderr": 0.03021683101150878 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7404580152671756, "acc_stderr": 0.03844876139785271, "acc_norm": 0.7404580152671756, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8099173553719008, "acc_stderr": 0.03581796951709282, "acc_norm": 0.8099173553719008, "acc_norm_stderr": 0.03581796951709282 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.040191074725573483, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.040191074725573483 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7423312883435583, "acc_stderr": 0.03436150827846917, "acc_norm": 0.7423312883435583, "acc_norm_stderr": 0.03436150827846917 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.8155339805825242, "acc_stderr": 0.03840423627288276, "acc_norm": 0.8155339805825242, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507333, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.79, "acc_stderr": 0.040936018074033256, "acc_norm": 0.79, "acc_norm_stderr": 0.040936018074033256 
}, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8186462324393359, "acc_stderr": 0.013778693778464076, "acc_norm": 0.8186462324393359, "acc_norm_stderr": 0.013778693778464076 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7658959537572254, "acc_stderr": 0.022797110278071124, "acc_norm": 0.7658959537572254, "acc_norm_stderr": 0.022797110278071124 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4402234636871508, "acc_stderr": 0.01660256461504994, "acc_norm": 0.4402234636871508, "acc_norm_stderr": 0.01660256461504994 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.761437908496732, "acc_stderr": 0.02440439492808787, "acc_norm": 0.761437908496732, "acc_norm_stderr": 0.02440439492808787 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7331189710610932, "acc_stderr": 0.025122637608816657, "acc_norm": 0.7331189710610932, "acc_norm_stderr": 0.025122637608816657 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7808641975308642, "acc_stderr": 0.023016705640262196, "acc_norm": 0.7808641975308642, "acc_norm_stderr": 0.023016705640262196 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5390070921985816, "acc_stderr": 0.02973659252642444, "acc_norm": 0.5390070921985816, "acc_norm_stderr": 0.02973659252642444 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5032594524119948, "acc_stderr": 0.012769964760343318, "acc_norm": 0.5032594524119948, "acc_norm_stderr": 0.012769964760343318 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7352941176470589, "acc_stderr": 0.026799562024887667, "acc_norm": 0.7352941176470589, "acc_norm_stderr": 0.026799562024887667 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7026143790849673, "acc_stderr": 0.018492596536396955, "acc_norm": 0.7026143790849673, "acc_norm_stderr": 0.018492596536396955 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7090909090909091, "acc_stderr": 0.04350271442923243, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 
0.04350271442923243 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7795918367346939, "acc_stderr": 0.02653704531214529, "acc_norm": 0.7795918367346939, "acc_norm_stderr": 0.02653704531214529 }, "harness|hendrycksTest-sociology|5": { "acc": 0.845771144278607, "acc_stderr": 0.025538433368578337, "acc_norm": 0.845771144278607, "acc_norm_stderr": 0.025538433368578337 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.89, "acc_stderr": 0.03144660377352203, "acc_norm": 0.89, "acc_norm_stderr": 0.03144660377352203 }, "harness|hendrycksTest-virology|5": { "acc": 0.572289156626506, "acc_stderr": 0.03851597683718533, "acc_norm": 0.572289156626506, "acc_norm_stderr": 0.03851597683718533 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8070175438596491, "acc_stderr": 0.030267457554898458, "acc_norm": 0.8070175438596491, "acc_norm_stderr": 0.030267457554898458 }, "harness|truthfulqa:mc|0": { "mc1": 0.5152998776009792, "mc1_stderr": 0.0174953044731879, "mc2": 0.6774426022949598, "mc2_stderr": 0.014870145786575549 }, "harness|winogrande|5": { "acc": 0.8421468034727704, "acc_stderr": 0.010247165248719763 }, "harness|gsm8k|5": { "acc": 0.6209249431387415, "acc_stderr": 0.013363630295088361 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_SJ-Donald__SJ-SOLAR-10.7b-DPO
[ "region:us" ]
2024-01-25T05:55:34+00:00
{"pretty_name": "Evaluation run of SJ-Donald/SJ-SOLAR-10.7b-DPO", "dataset_summary": "Dataset automatically created during the evaluation run of model [SJ-Donald/SJ-SOLAR-10.7b-DPO](https://huggingface.co/SJ-Donald/SJ-SOLAR-10.7b-DPO) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_SJ-Donald__SJ-SOLAR-10.7b-DPO\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T05:53:20.241050](https://huggingface.co/datasets/open-llm-leaderboard/details_SJ-Donald__SJ-SOLAR-10.7b-DPO/blob/main/results_2024-01-25T05-53-20.241050.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6694201238242145,\n \"acc_stderr\": 0.03145425883361444,\n \"acc_norm\": 0.6709590638465028,\n \"acc_norm_stderr\": 0.03209348907350449,\n \"mc1\": 0.5152998776009792,\n \"mc1_stderr\": 0.0174953044731879,\n \"mc2\": 0.6774426022949598,\n \"mc2_stderr\": 0.014870145786575549\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6535836177474402,\n \"acc_stderr\": 0.013905011180063232,\n \"acc_norm\": 0.6825938566552902,\n \"acc_norm_stderr\": 0.013602239088038167\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6835291774546903,\n \"acc_stderr\": 0.0046414842733351,\n \"acc_norm\": 0.8695478988249352,\n \"acc_norm_stderr\": 0.003361118395452385\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5703703703703704,\n \"acc_stderr\": 0.042763494943765995,\n \"acc_norm\": 0.5703703703703704,\n \"acc_norm_stderr\": 0.042763494943765995\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7631578947368421,\n \"acc_stderr\": 0.03459777606810535,\n \"acc_norm\": 0.7631578947368421,\n \"acc_norm_stderr\": 0.03459777606810535\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7018867924528301,\n \"acc_stderr\": 0.02815283794249387,\n \"acc_norm\": 0.7018867924528301,\n \"acc_norm_stderr\": 0.02815283794249387\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 0.0358687928008034,\n \"acc_norm\": 0.7569444444444444,\n \"acc_norm_stderr\": 0.0358687928008034\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.43,\n 
\"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6994219653179191,\n \"acc_stderr\": 0.0349610148119118,\n \"acc_norm\": 0.6994219653179191,\n \"acc_norm_stderr\": 0.0349610148119118\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.39215686274509803,\n \"acc_stderr\": 0.048580835742663454,\n \"acc_norm\": 0.39215686274509803,\n \"acc_norm_stderr\": 0.048580835742663454\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6212765957446809,\n \"acc_stderr\": 0.03170995606040655,\n \"acc_norm\": 0.6212765957446809,\n \"acc_norm_stderr\": 0.03170995606040655\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6275862068965518,\n \"acc_stderr\": 0.04028731532947559,\n \"acc_norm\": 0.6275862068965518,\n \"acc_norm_stderr\": 0.04028731532947559\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.02573364199183898,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.02573364199183898\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.044444444444444495,\n \"acc_norm\": 
0.4444444444444444,\n \"acc_norm_stderr\": 0.044444444444444495\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8064516129032258,\n \"acc_stderr\": 0.022475258525536057,\n \"acc_norm\": 0.8064516129032258,\n \"acc_norm_stderr\": 0.022475258525536057\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4876847290640394,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.4876847290640394,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.806060606060606,\n \"acc_stderr\": 0.03087414513656209,\n \"acc_norm\": 0.806060606060606,\n \"acc_norm_stderr\": 0.03087414513656209\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8686868686868687,\n \"acc_stderr\": 0.024063156416822516,\n \"acc_norm\": 0.8686868686868687,\n \"acc_norm_stderr\": 0.024063156416822516\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9067357512953368,\n \"acc_stderr\": 0.02098685459328972,\n \"acc_norm\": 0.9067357512953368,\n \"acc_norm_stderr\": 0.02098685459328972\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.676923076923077,\n \"acc_stderr\": 0.023710888501970565,\n \"acc_norm\": 0.676923076923077,\n \"acc_norm_stderr\": 0.023710888501970565\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37777777777777777,\n \"acc_stderr\": 0.029560707392465715,\n \"acc_norm\": 0.37777777777777777,\n \"acc_norm_stderr\": 0.029560707392465715\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 
0.7184873949579832,\n \"acc_stderr\": 0.029213549414372174,\n \"acc_norm\": 0.7184873949579832,\n \"acc_norm_stderr\": 0.029213549414372174\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8568807339449541,\n \"acc_stderr\": 0.01501446249716859,\n \"acc_norm\": 0.8568807339449541,\n \"acc_norm_stderr\": 0.01501446249716859\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6111111111111112,\n \"acc_stderr\": 0.033247089118091176,\n \"acc_norm\": 0.6111111111111112,\n \"acc_norm_stderr\": 0.033247089118091176\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8676470588235294,\n \"acc_stderr\": 0.023784297520918856,\n \"acc_norm\": 0.8676470588235294,\n \"acc_norm_stderr\": 0.023784297520918856\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8776371308016878,\n \"acc_stderr\": 0.021331741829746786,\n \"acc_norm\": 0.8776371308016878,\n \"acc_norm_stderr\": 0.021331741829746786\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7174887892376681,\n \"acc_stderr\": 0.03021683101150878,\n \"acc_norm\": 0.7174887892376681,\n \"acc_norm_stderr\": 0.03021683101150878\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7404580152671756,\n \"acc_stderr\": 0.03844876139785271,\n \"acc_norm\": 0.7404580152671756,\n \"acc_norm_stderr\": 0.03844876139785271\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8099173553719008,\n \"acc_stderr\": 0.03581796951709282,\n \"acc_norm\": 0.8099173553719008,\n \"acc_norm_stderr\": 0.03581796951709282\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.040191074725573483,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 
0.040191074725573483\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7423312883435583,\n \"acc_stderr\": 0.03436150827846917,\n \"acc_norm\": 0.7423312883435583,\n \"acc_norm_stderr\": 0.03436150827846917\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8155339805825242,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.8155339805825242,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8186462324393359,\n \"acc_stderr\": 0.013778693778464076,\n \"acc_norm\": 0.8186462324393359,\n \"acc_norm_stderr\": 0.013778693778464076\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7658959537572254,\n \"acc_stderr\": 0.022797110278071124,\n \"acc_norm\": 0.7658959537572254,\n \"acc_norm_stderr\": 0.022797110278071124\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4402234636871508,\n \"acc_stderr\": 0.01660256461504994,\n \"acc_norm\": 0.4402234636871508,\n \"acc_norm_stderr\": 0.01660256461504994\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.761437908496732,\n \"acc_stderr\": 0.02440439492808787,\n \"acc_norm\": 0.761437908496732,\n \"acc_norm_stderr\": 0.02440439492808787\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7331189710610932,\n \"acc_stderr\": 0.025122637608816657,\n \"acc_norm\": 0.7331189710610932,\n \"acc_norm_stderr\": 
0.025122637608816657\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7808641975308642,\n \"acc_stderr\": 0.023016705640262196,\n \"acc_norm\": 0.7808641975308642,\n \"acc_norm_stderr\": 0.023016705640262196\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5390070921985816,\n \"acc_stderr\": 0.02973659252642444,\n \"acc_norm\": 0.5390070921985816,\n \"acc_norm_stderr\": 0.02973659252642444\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5032594524119948,\n \"acc_stderr\": 0.012769964760343318,\n \"acc_norm\": 0.5032594524119948,\n \"acc_norm_stderr\": 0.012769964760343318\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7352941176470589,\n \"acc_stderr\": 0.026799562024887667,\n \"acc_norm\": 0.7352941176470589,\n \"acc_norm_stderr\": 0.026799562024887667\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7026143790849673,\n \"acc_stderr\": 0.018492596536396955,\n \"acc_norm\": 0.7026143790849673,\n \"acc_norm_stderr\": 0.018492596536396955\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7090909090909091,\n \"acc_stderr\": 0.04350271442923243,\n \"acc_norm\": 0.7090909090909091,\n \"acc_norm_stderr\": 0.04350271442923243\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7795918367346939,\n \"acc_stderr\": 0.02653704531214529,\n \"acc_norm\": 0.7795918367346939,\n \"acc_norm_stderr\": 0.02653704531214529\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n \"acc_stderr\": 0.025538433368578337,\n \"acc_norm\": 0.845771144278607,\n \"acc_norm_stderr\": 0.025538433368578337\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.89,\n \"acc_stderr\": 0.03144660377352203,\n \"acc_norm\": 0.89,\n \"acc_norm_stderr\": 0.03144660377352203\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.572289156626506,\n \"acc_stderr\": 0.03851597683718533,\n \"acc_norm\": 0.572289156626506,\n 
\"acc_norm_stderr\": 0.03851597683718533\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8070175438596491,\n \"acc_stderr\": 0.030267457554898458,\n \"acc_norm\": 0.8070175438596491,\n \"acc_norm_stderr\": 0.030267457554898458\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5152998776009792,\n \"mc1_stderr\": 0.0174953044731879,\n \"mc2\": 0.6774426022949598,\n \"mc2_stderr\": 0.014870145786575549\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8421468034727704,\n \"acc_stderr\": 0.010247165248719763\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6209249431387415,\n \"acc_stderr\": 0.013363630295088361\n }\n}\n```", "repo_url": "https://huggingface.co/SJ-Donald/SJ-SOLAR-10.7b-DPO", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|arc:challenge|25_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|gsm8k|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hellaswag|10_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-53-20.241050.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-53-20.241050.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-53-20.241050.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-53-20.241050.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-53-20.241050.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-53-20.241050.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T05-53-20.241050.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T05-53-20.241050.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["**/details_harness|winogrande|5_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T05-53-20.241050.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T05_53_20.241050", "path": ["results_2024-01-25T05-53-20.241050.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T05-53-20.241050.parquet"]}]}]}
2024-01-25T05:55:56+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of SJ-Donald/SJ-SOLAR-10.7b-DPO Dataset automatically created during the evaluation run of model SJ-Donald/SJ-SOLAR-10.7b-DPO on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T05:53:20.241050 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of SJ-Donald/SJ-SOLAR-10.7b-DPO\n\n\n\nDataset automatically created during the evaluation run of model SJ-Donald/SJ-SOLAR-10.7b-DPO on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T05:53:20.241050(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of SJ-Donald/SJ-SOLAR-10.7b-DPO\n\n\n\nDataset automatically created during the evaluation run of model SJ-Donald/SJ-SOLAR-10.7b-DPO on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T05:53:20.241050(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 195, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of SJ-Donald/SJ-SOLAR-10.7b-DPO\n\n\n\nDataset automatically created during the evaluation run of model SJ-Donald/SJ-SOLAR-10.7b-DPO on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T05:53:20.241050(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
[ -0.05233246088027954, 0.2244400531053543, -0.00550096994265914, 0.011256800033152103, 0.10450908541679382, -0.017878443002700806, 0.01969110034406185, 0.10279770195484161, -0.012487873435020447, 0.1626834124326706, -0.010964836925268173, 0.10268586874008179, 0.09300317615270615, 0.1493835747241974, 0.012253372929990292, -0.13970379531383514, 0.026980353519320488, -0.07341668009757996, 0.05457395315170288, 0.07859653234481812, 0.09240512549877167, -0.08666183799505234, 0.05830305069684982, -0.06275198608636856, -0.0361531600356102, 0.01579071395099163, -0.08917072415351868, -0.04807140305638313, 0.09237964451313019, 0.08302146196365356, 0.03144756332039833, 0.010494234040379524, 0.034597981721162796, -0.24108253419399261, 0.01152127142995596, 0.08779112249612808, 0.011498544365167618, 0.05383540689945221, 0.12350257486104965, -0.03250295668840408, 0.0711311474442482, -0.09366169571876526, 0.06249992921948433, 0.0340413972735405, -0.10835287719964981, -0.1533946394920349, -0.12911035120487213, 0.010978633537888527, 0.06672865152359009, 0.0414612703025341, -0.024336479604244232, 0.1548483520746231, -0.02472492679953575, 0.042219579219818115, 0.10488422960042953, -0.10096853971481323, -0.020067421719431877, 0.06149367243051529, 0.017444992437958717, 0.0702199637889862, -0.07947832345962524, -0.013337856158614159, 0.024657122790813446, 0.04173720255494118, 0.025982316583395004, -0.0017580557614564896, -0.0756545215845108, 0.03128992021083832, -0.12121542543172836, -0.11341550201177597, 0.23554931581020355, 0.004901141859591007, -0.0379210039973259, -0.15933041274547577, -0.03461240977048874, 0.02308213897049427, 0.005665509030222893, -0.05047399550676346, 0.018268251791596413, -0.02202410064637661, 0.06912460923194885, -0.019467785954475403, -0.10210232436656952, -0.020070066675543785, -0.009187442250549793, 0.06715362519025803, 0.011134219355881214, -0.003988295327872038, -0.0115163279697299, 0.1268645077943802, -0.0013399861054494977, -0.104340098798275, 
-0.07276006042957306, -0.05348803102970123, -0.1232190877199173, -0.04372725263237953, 0.015643183141946793, -0.08870410174131393, 0.04416664317250252, 0.20668697357177734, -0.0382450595498085, 0.026507334783673286, -0.09561003744602203, 0.0029861286748200655, 0.1356882005929947, 0.0760175883769989, -0.062998928129673, -0.05655349791049957, -0.016947219148278236, 0.00825058575719595, 0.02746952511370182, -0.018405592069029808, 0.010674449615180492, 0.05725441128015518, 0.044889338314533234, 0.11384681612253189, 0.11932046711444855, 0.02227320335805416, -0.05118239298462868, -0.028208700940012932, 0.22087199985980988, -0.15704697370529175, -0.010105853900313377, 0.004050760995596647, -0.041051894426345825, -0.07650600373744965, 0.054230086505413055, -0.010853351093828678, -0.06428978592157364, 0.11264848709106445, -0.04053197056055069, -0.07814610004425049, -0.08368504047393799, -0.046353358775377274, 0.050210315734148026, 0.00700257858261466, -0.015584425069391727, -0.07943993806838989, -0.11884306371212006, -0.07766670733690262, 0.010443138889968395, -0.07403478026390076, -0.03843516483902931, 0.037742409855127335, -0.00985752698034048, -0.02732798643410206, -0.02541540563106537, 0.12247881293296814, -0.04564389958977699, 0.03562931343913078, 0.01427107211202383, 0.004327543545514345, 0.07079163938760757, 0.04687105491757393, -0.1303148865699768, 0.07619641721248627, -0.10946794599294662, 0.10938072949647903, -0.10290677100419998, 0.0061567844823002815, -0.13516567647457123, 0.0018658210756257176, -0.03833279386162758, 0.006743885111063719, 0.005254982504993677, 0.11623165756464005, -0.24768435955047607, 0.021913789212703705, 0.11975853145122528, -0.10260245949029922, -0.07684989273548126, 0.06175759434700012, -0.031963348388671875, 0.06774811446666718, 0.04907670244574547, 0.09398286789655685, 0.08420832455158234, -0.049477532505989075, -0.10224474221467972, -0.06854376196861267, -0.02162908762693405, 0.1343676596879959, 0.06247233971953392, -0.06869357824325562, 
0.09351778030395508, 0.04822425916790962, -0.03605863079428673, -0.08796438574790955, -0.006091531366109848, -0.057805564254522324, -0.025243721902370453, -0.03368435055017471, -0.07747651636600494, 0.010595771484076977, -0.08019695430994034, -0.023105818778276443, -0.09154342114925385, 0.008790304884314537, 0.09628143906593323, -0.017493348568677902, 0.00347672076895833, -0.05773913487792015, 0.03389349579811096, -0.002707841107621789, 0.024869389832019806, -0.21655206382274628, -0.091890849173069, 0.0491885244846344, -0.10939232259988785, 0.039082709699869156, 0.02245553769171238, 0.010490153916180134, 0.0325646735727787, 0.002998785814270377, 0.018206192180514336, 0.007421043235808611, 0.00032661823206581175, 0.006586967967450619, -0.1215452179312706, -0.048451341688632965, -0.0809314027428627, 0.07450324296951294, -0.144422709941864, -0.012318773195147514, 0.09025327116250992, 0.15179216861724854, -0.0004222338611725718, -0.08555272221565247, 0.07928020507097244, -0.011110775172710419, -0.029446057975292206, -0.07145164906978607, -0.004891199059784412, -0.016620635986328125, 0.04521634057164192, 0.018149612471461296, -0.19217662513256073, -0.13985657691955566, 0.06936510652303696, 0.12397059053182602, -0.06968291848897934, -0.05565918609499931, -0.05722077190876007, -0.0541338287293911, -0.08610793948173523, -0.06667853891849518, 0.05186416208744049, 0.07818783819675446, 0.024264110252261162, -0.06237519159913063, -0.06708428263664246, 0.008834649808704853, 0.056159161031246185, -0.06359221041202545, 0.08646611869335175, 0.09059258550405502, -0.11497741937637329, 0.10193070024251938, 0.009080103598535061, 0.1331782191991806, 0.06125947833061218, 0.005847499705851078, -0.10635336488485336, -0.010411368682980537, 0.04188430681824684, 0.022104134783148766, 0.10431654006242752, -0.023888804018497467, 0.04381152614951134, 0.08486252278089523, -0.01195837464183569, 0.0444633848965168, -0.06065422296524048, 0.028057459741830826, 0.044902026653289795, 
0.004153343848884106, 0.010386813431978226, 0.005724667105823755, 0.014341824688017368, 0.056965313851833344, 0.024362340569496155, 0.10624083876609802, -0.014682784676551819, -0.04739875718951225, -0.08784689754247665, 0.14545957744121552, -0.09746751934289932, -0.26870569586753845, -0.14793938398361206, 0.00006484227196779102, -0.01861533522605896, -0.006443643942475319, 0.05255435034632683, -0.009760049171745777, -0.1047244518995285, -0.0863490030169487, 0.057512667030096054, 0.0030997921712696552, -0.13177534937858582, -0.04259564355015755, 0.049516066908836365, 0.0004989744047634304, -0.15867342054843903, 0.04216144606471062, 0.049448512494564056, -0.04625530168414116, 0.011523227207362652, 0.08381561934947968, 0.13754642009735107, 0.07361860573291779, 0.0382235050201416, -0.028406431898474693, 0.0016676952363923192, 0.17620131373405457, -0.11512725055217743, 0.03749477490782738, 0.11796799302101135, -0.035249192267656326, 0.05288007855415344, 0.16711100935935974, 0.011775192804634571, -0.0945616215467453, 0.03456272557377815, 0.05918673425912857, -0.07010585069656372, -0.2669520080089569, -0.08982430398464203, -0.020164750516414642, -0.007401667069643736, 0.09440550953149796, 0.0678986981511116, -0.007065754849463701, 0.04085288941860199, -0.10768251866102219, -0.02056240849196911, -0.051839303225278854, 0.06731627136468887, 0.0712452158331871, 0.005693302024155855, 0.042578473687171936, -0.044843483716249466, 0.03619115427136421, 0.10738234966993332, 0.046442195773124695, 0.1521790772676468, -0.030160855501890182, 0.15876755118370056, 0.07258279621601105, 0.09425890445709229, -0.04859934002161026, 0.057621221989393234, 0.02556580677628517, 0.07007919996976852, -0.005297364201396704, -0.1025632843375206, -0.059680014848709106, 0.08050335198640823, -0.004001426976174116, -0.0614333376288414, 0.03065701574087143, -0.05108330026268959, 0.03153950348496437, 0.17160889506340027, -0.02146846055984497, -0.15334008634090424, -0.06049652397632599, 0.06644318997859955, 
-0.0097267497330904, -0.10814838856458664, -0.016974352300167084, 0.09490054845809937, -0.14973057806491852, 0.02055586874485016, -0.026433542370796204, 0.08207008987665176, -0.12530486285686493, -0.007011625915765762, -0.012117206119000912, 0.07433827221393585, 0.0045266118831932545, 0.12271551787853241, -0.13708405196666718, 0.10499497503042221, 0.0006571695557795465, 0.022701779380440712, -0.0905393585562706, 0.0686410665512085, -0.04935322701931, -0.015360388904809952, 0.1504337042570114, -0.010004241950809956, -0.1287839412689209, -0.03921378776431084, -0.11192779988050461, 0.0143780168145895, 0.05562802031636238, -0.12123626470565796, 0.09608417004346848, 0.03397670388221741, -0.010173140093684196, -0.028623346239328384, -0.010732738301157951, -0.13531163334846497, -0.22980822622776031, 0.11071903258562088, -0.10938511043787003, 0.07137379795312881, -0.05529923737049103, -0.025523491203784943, -0.052472345530986786, 0.16429167985916138, -0.08659061044454575, -0.06843850016593933, -0.12178211659193039, 0.0350157767534256, 0.17587608098983765, -0.04898921400308609, 0.028257157653570175, -0.036300092935562134, 0.171388640999794, 0.008382412604987621, -0.06915651261806488, 0.002011436503380537, -0.09121370315551758, -0.14429067075252533, -0.03732176870107651, 0.12953467667102814, 0.05355455353856087, 0.009743684902787209, 0.007982674054801464, 0.03900047764182091, 0.004404073115438223, -0.08790043741464615, 0.044811759144067764, 0.08460556715726852, 0.10255338251590729, 0.04232249781489372, -0.02739201858639717, -0.11441297829151154, -0.10658916085958481, -0.08785919845104218, 0.06522636860609055, 0.127974733710289, -0.06472960114479065, 0.1497829705476761, 0.1188599020242691, -0.0960652306675911, -0.18500640988349915, -0.04431351646780968, 0.011928818188607693, -0.010559644550085068, 0.12341582030057907, -0.21210065484046936, 0.05574251711368561, 0.06653153151273727, -0.00738163385540247, 0.06884165108203888, -0.23273350298404694, -0.12622380256652832, 
0.055557023733854294, 0.017406560480594635, -0.2192535400390625, -0.17269402742385864, -0.11413924396038055, -0.019319778308272362, -0.19018436968326569, 0.15201158821582794, -0.006591235287487507, 0.009602850303053856, -0.004406313877552748, 0.09168116003274918, 0.06559532135725021, -0.05403007194399834, 0.13973656296730042, -0.0031661614775657654, 0.020138680934906006, -0.09502312541007996, -0.027465900406241417, 0.01622779481112957, -0.036805301904678345, 0.09470416605472565, 0.06570085138082504, 0.05738189071416855, -0.09835290908813477, -0.02781716361641884, -0.053495168685913086, 0.04657967388629913, -0.07251612097024918, -0.06587705761194229, -0.053098976612091064, 0.07181338220834732, 0.0722331777215004, -0.028245117515325546, 0.04829544946551323, -0.02749190479516983, 0.0474172905087471, 0.21434442698955536, 0.10328542441129684, 0.05363462120294571, -0.10124994069337845, -0.012525418773293495, -0.005797943565994501, 0.009015017189085484, -0.15858420729637146, 0.0457579605281353, 0.08194933831691742, 0.04110311344265938, 0.05916415899991989, -0.024566449224948883, -0.18717604875564575, -0.01828114315867424, 0.0675727054476738, -0.1123054176568985, -0.22434452176094055, 0.01924286037683487, 0.1441868543624878, -0.1202157661318779, -0.04355285316705704, 0.07868698984384537, 0.00845247134566307, -0.03700549155473709, 0.005748821422457695, 0.07463864237070084, 0.055530257523059845, 0.10832816362380981, 0.004267064854502678, 0.06107752025127411, -0.08462443947792053, 0.1166032925248146, 0.15685349702835083, -0.09771987795829773, 0.01256213616579771, 0.059473808854818344, -0.058107517659664154, -0.052787914872169495, 0.016152219846844673, -0.02148095890879631, 0.028860267251729965, -0.05438971146941185, 0.02251456491649151, -0.03615397959947586, 0.04021097347140312, 0.05955066159367561, 0.0025736060924828053, 0.034922897815704346, 0.02734548971056938, -0.013645830564200878, -0.08852677792310715, 0.08487985283136368, 0.02492486871778965, 0.0486864373087883, 
-0.04350993037223816, 0.023427793756127357, 0.0001907903206301853, 0.005723257549107075, 0.0094026243314147, -0.040812812745571136, -0.0371895506978035, -0.005810868926346302, -0.1395876258611679, 0.013852745294570923, -0.07817136496305466, -0.011834758333861828, -0.00014899478992447257, -0.014370590448379517, -0.009832050651311874, 0.016257131472229958, -0.056598495692014694, -0.05266325920820236, -0.04884243756532669, 0.11100897938013077, -0.18033625185489655, 0.0009588737739250064, 0.08430028706789017, -0.08058977872133255, 0.07747142016887665, 0.012592609971761703, -0.010632294230163097, 0.013648388907313347, -0.0932237058877945, -0.034295253455638885, -0.02058999612927437, 0.06816530227661133, 0.013861548155546188, -0.1503620594739914, -0.018343117088079453, 0.01399591937661171, -0.08426006138324738, -0.026280784979462624, 0.04756723344326019, -0.15121044218540192, 0.027136579155921936, 0.051490090787410736, -0.031513262540102005, -0.04733020067214966, 0.04774205759167671, 0.03012678027153015, 0.01584957353770733, 0.0857958272099495, -0.004810180515050888, 0.037910424172878265, -0.1516602337360382, -0.053392194211483, -0.0038987831212580204, -0.0019300157437101007, 0.007162847556173801, 0.026763681322336197, 0.05187216401100159, -0.00041226245230063796, 0.20282085239887238, -0.018880510702729225, 0.08479049801826477, 0.0375160351395607, 0.009474608115851879, -0.04874058812856674, 0.019972283393144608, 0.03326112776994705, 0.023334136232733727, 0.02703005075454712, 0.028947738930583, -0.03438214212656021, -0.047006040811538696, -0.03601473569869995, 0.07044586539268494, 0.1469588428735733, 0.144089013338089, -0.04004036262631416, 0.071391761302948, -0.1606646180152893, -0.045135390013456345, 0.045353952795267105, -0.030055589973926544, 0.044695351272821426, -0.07501626014709473, 0.0373724140226841, 0.06996830552816391, -0.11001385003328323, 0.13875076174736023, -0.06840641051530838, -0.04189608246088028, -0.04580029100179672, -0.1404157280921936, 
-0.033214934170246124, 0.0048599448055028915, 0.010094714350998402, -0.09754355251789093, 0.09705544263124466, 0.1187971755862236, -0.021400945261120796, -0.024923069402575493, 0.08522839099168777, -0.04721551388502121, -0.06854372471570969, -0.0302809476852417, 0.013753288425505161, 0.02456461265683174, -0.02306942082941532, 0.08893441408872604, 0.021331124007701874, 0.0871536135673523, 0.0759318470954895, 0.08676309883594513, 0.06992325186729431, 0.01956094801425934, -0.044611699879169464, -0.05785268172621727, 0.00041247825720347464, -0.005422103218734264, -0.0547117218375206, 0.1692076325416565, 0.04948975145816803, 0.019320432096719742, 0.01093091256916523, 0.23228375613689423, -0.007281478028744459, -0.07237100601196289, -0.1280430406332016, 0.07506555318832397, -0.000014999041013652459, 0.029863815754652023, 0.05425417795777321, -0.14118266105651855, 0.01661105267703533, 0.15371078252792358, 0.10718456655740738, 0.04947361722588539, 0.009597101248800755, 0.027112865820527077, 0.02427956648170948, -0.05243229120969772, 0.04685727134346962, 0.04035618528723717, 0.17240536212921143, -0.05117781087756157, 0.0759025439620018, -0.005308847874403, -0.012170763686299324, -0.03339029476046562, 0.07587333023548126, -0.0458146296441555, 0.0031089852564036846, -0.038574639707803726, 0.1099555492401123, -0.0339505597949028, -0.29221418499946594, -0.009052574634552002, -0.08854322135448456, -0.12865868210792542, -0.03340723738074303, 0.046679746359586716, -0.03932018578052521, 0.02592563070356846, 0.02582252211868763, -0.01156194880604744, 0.21061713993549347, 0.021977374330163002, -0.07601003348827362, -0.03402434661984444, 0.0807986706495285, -0.038361985236406326, 0.23353265225887299, 0.0027134015690535307, 0.07465848326683044, 0.07870267331600189, -0.01996130309998989, -0.1774781197309494, 0.024453671649098396, 0.1054699495434761, -0.03801555931568146, 0.06515762209892273, 0.16611148416996002, -0.02327745035290718, 0.1256989985704422, 0.04951030761003494, 
-0.025844823569059372, 0.03392428159713745, 0.0786915048956871, 0.04984473064541817, -0.08467849344015121, 0.07199443876743317, -0.08205107599496841, 0.12938755750656128, 0.10235907137393951, -0.028194187209010124, 0.0070600928738713264, -0.08456025272607803, 0.069081149995327, -0.022858478128910065, 0.12193062901496887, -0.0032468202989548445, -0.12495645880699158, 0.02665887400507927, 0.02294856123626232, 0.0689549669623375, -0.20073555409908295, -0.048694267868995667, 0.1150016114115715, -0.04900166764855385, -0.0026406128890812397, 0.10236689448356628, 0.04366810619831085, 0.01677699387073517, -0.07146172970533371, -0.0816374272108078, -0.00875993724912405, 0.11556398868560791, -0.08923476934432983, -0.032641589641571045 ]
df0efc55a0dec766537c2c1eee9a0d63f25c4b5a
# Dataset Card for Evaluation run of macadeliccc/SOLAR-10.7b-Instruct-dpo <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [macadeliccc/SOLAR-10.7b-Instruct-dpo](https://huggingface.co/macadeliccc/SOLAR-10.7b-Instruct-dpo) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_macadeliccc__SOLAR-10.7b-Instruct-dpo", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T05:53:40.914982](https://huggingface.co/datasets/open-llm-leaderboard/details_macadeliccc__SOLAR-10.7b-Instruct-dpo/blob/main/results_2024-01-25T05-53-40.914982.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6636898445574112, "acc_stderr": 0.031769837970977544, "acc_norm": 0.6652603795002481, "acc_norm_stderr": 0.03240911882592747, "mc1": 0.5642594859241126, "mc1_stderr": 0.01735834539886313, "mc2": 0.719754146671385, "mc2_stderr": 0.014988200007339842 }, "harness|arc:challenge|25": { "acc": 0.6885665529010239, "acc_stderr": 0.013532472099850945, "acc_norm": 0.7175767918088737, "acc_norm_stderr": 0.013155456884097222 }, "harness|hellaswag|10": { "acc": 0.7110137422824139, "acc_stderr": 0.00452365118401626, "acc_norm": 0.8808006373232424, "acc_norm_stderr": 0.003233607423889983 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.42, "acc_stderr": 0.04960449637488583, "acc_norm": 0.42, "acc_norm_stderr": 0.04960449637488583 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.743421052631579, "acc_stderr": 0.0355418036802569, "acc_norm": 0.743421052631579, "acc_norm_stderr": 0.0355418036802569 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.73, "acc_stderr": 0.04461960433384741, "acc_norm": 0.73, "acc_norm_stderr": 0.04461960433384741 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6867924528301886, "acc_stderr": 0.028544793319055326, "acc_norm": 0.6867924528301886, "acc_norm_stderr": 0.028544793319055326 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7569444444444444, "acc_stderr": 0.03586879280080341, "acc_norm": 0.7569444444444444, "acc_norm_stderr": 0.03586879280080341 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 
0.05024183937956912 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6820809248554913, "acc_stderr": 0.0355068398916558, "acc_norm": 0.6820809248554913, "acc_norm_stderr": 0.0355068398916558 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.04810840148082636, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.04810840148082636 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.73, "acc_stderr": 0.04461960433384739, "acc_norm": 0.73, "acc_norm_stderr": 0.04461960433384739 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6340425531914894, "acc_stderr": 0.031489558297455304, "acc_norm": 0.6340425531914894, "acc_norm_stderr": 0.031489558297455304 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5087719298245614, "acc_stderr": 0.04702880432049615, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6137931034482759, "acc_stderr": 0.04057324734419035, "acc_norm": 0.6137931034482759, "acc_norm_stderr": 0.04057324734419035 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.47883597883597884, "acc_stderr": 0.025728230952130733, "acc_norm": 0.47883597883597884, "acc_norm_stderr": 0.025728230952130733 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42857142857142855, "acc_stderr": 0.0442626668137991, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.0442626668137991 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8129032258064516, "acc_stderr": 0.022185710092252255, "acc_norm": 0.8129032258064516, "acc_norm_stderr": 0.022185710092252255 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4975369458128079, "acc_stderr": 0.035179450386910616, "acc_norm": 0.4975369458128079, "acc_norm_stderr": 0.035179450386910616 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.806060606060606, "acc_stderr": 0.03087414513656208, "acc_norm": 0.806060606060606, "acc_norm_stderr": 0.03087414513656208 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8585858585858586, "acc_stderr": 0.02482590979334334, "acc_norm": 0.8585858585858586, "acc_norm_stderr": 0.02482590979334334 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.021995311963644244, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.021995311963644244 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6692307692307692, "acc_stderr": 0.02385479568097114, "acc_norm": 0.6692307692307692, "acc_norm_stderr": 0.02385479568097114 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37037037037037035, "acc_stderr": 0.02944316932303154, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.02944316932303154 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6974789915966386, "acc_stderr": 0.029837962388291936, "acc_norm": 0.6974789915966386, "acc_norm_stderr": 0.029837962388291936 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3841059602649007, "acc_stderr": 0.03971301814719197, "acc_norm": 0.3841059602649007, "acc_norm_stderr": 0.03971301814719197 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8422018348623853, "acc_stderr": 0.01563002297009244, "acc_norm": 0.8422018348623853, "acc_norm_stderr": 0.01563002297009244 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5694444444444444, "acc_stderr": 
0.03376922151252335, "acc_norm": 0.5694444444444444, "acc_norm_stderr": 0.03376922151252335 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8480392156862745, "acc_stderr": 0.0251956584289318, "acc_norm": 0.8480392156862745, "acc_norm_stderr": 0.0251956584289318 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8649789029535865, "acc_stderr": 0.022245776632003694, "acc_norm": 0.8649789029535865, "acc_norm_stderr": 0.022245776632003694 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.03114679648297246, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.03114679648297246 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7480916030534351, "acc_stderr": 0.03807387116306086, "acc_norm": 0.7480916030534351, "acc_norm_stderr": 0.03807387116306086 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.040191074725573483, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.040191074725573483 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7423312883435583, "acc_stderr": 0.03436150827846917, "acc_norm": 0.7423312883435583, "acc_norm_stderr": 0.03436150827846917 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.44642857142857145, "acc_stderr": 0.04718471485219588, "acc_norm": 0.44642857142857145, "acc_norm_stderr": 0.04718471485219588 }, "harness|hendrycksTest-management|5": { "acc": 0.8446601941747572, "acc_stderr": 0.03586594738573974, "acc_norm": 0.8446601941747572, "acc_norm_stderr": 0.03586594738573974 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8504273504273504, "acc_stderr": 0.02336505149175372, "acc_norm": 0.8504273504273504, "acc_norm_stderr": 0.02336505149175372 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 
0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8020434227330779, "acc_stderr": 0.014248873549217575, "acc_norm": 0.8020434227330779, "acc_norm_stderr": 0.014248873549217575 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7514450867052023, "acc_stderr": 0.023267528432100174, "acc_norm": 0.7514450867052023, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.40893854748603353, "acc_stderr": 0.01644283065471554, "acc_norm": 0.40893854748603353, "acc_norm_stderr": 0.01644283065471554 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7647058823529411, "acc_stderr": 0.0242886194660461, "acc_norm": 0.7647058823529411, "acc_norm_stderr": 0.0242886194660461 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7170418006430869, "acc_stderr": 0.02558306248998482, "acc_norm": 0.7170418006430869, "acc_norm_stderr": 0.02558306248998482 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7870370370370371, "acc_stderr": 0.0227797190887334, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.0227797190887334 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5177304964539007, "acc_stderr": 0.02980873964223777, "acc_norm": 0.5177304964539007, "acc_norm_stderr": 0.02980873964223777 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4863102998696219, "acc_stderr": 0.012765448722610085, "acc_norm": 0.4863102998696219, "acc_norm_stderr": 0.012765448722610085 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.75, "acc_stderr": 0.026303648393696036, "acc_norm": 0.75, "acc_norm_stderr": 0.026303648393696036 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6879084967320261, "acc_stderr": 0.01874501120127766, "acc_norm": 0.6879084967320261, "acc_norm_stderr": 0.01874501120127766 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 
0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7428571428571429, "acc_stderr": 0.027979823538744546, "acc_norm": 0.7428571428571429, "acc_norm_stderr": 0.027979823538744546 }, "harness|hendrycksTest-sociology|5": { "acc": 0.845771144278607, "acc_stderr": 0.025538433368578337, "acc_norm": 0.845771144278607, "acc_norm_stderr": 0.025538433368578337 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.89, "acc_stderr": 0.03144660377352203, "acc_norm": 0.89, "acc_norm_stderr": 0.03144660377352203 }, "harness|hendrycksTest-virology|5": { "acc": 0.5843373493975904, "acc_stderr": 0.03836722176598052, "acc_norm": 0.5843373493975904, "acc_norm_stderr": 0.03836722176598052 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7660818713450293, "acc_stderr": 0.03246721765117826, "acc_norm": 0.7660818713450293, "acc_norm_stderr": 0.03246721765117826 }, "harness|truthfulqa:mc|0": { "mc1": 0.5642594859241126, "mc1_stderr": 0.01735834539886313, "mc2": 0.719754146671385, "mc2_stderr": 0.014988200007339842 }, "harness|winogrande|5": { "acc": 0.8232044198895028, "acc_stderr": 0.01072192328791874 }, "harness|gsm8k|5": { "acc": 0.6103108415466262, "acc_stderr": 0.013433123236110713 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_macadeliccc__SOLAR-10.7b-Instruct-dpo
[ "region:us" ]
2024-01-25T05:55:59+00:00
{"pretty_name": "Evaluation run of macadeliccc/SOLAR-10.7b-Instruct-dpo", "dataset_summary": "Dataset automatically created during the evaluation run of model [macadeliccc/SOLAR-10.7b-Instruct-dpo](https://huggingface.co/macadeliccc/SOLAR-10.7b-Instruct-dpo) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_macadeliccc__SOLAR-10.7b-Instruct-dpo\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T05:53:40.914982](https://huggingface.co/datasets/open-llm-leaderboard/details_macadeliccc__SOLAR-10.7b-Instruct-dpo/blob/main/results_2024-01-25T05-53-40.914982.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6636898445574112,\n \"acc_stderr\": 0.031769837970977544,\n \"acc_norm\": 0.6652603795002481,\n \"acc_norm_stderr\": 0.03240911882592747,\n \"mc1\": 0.5642594859241126,\n \"mc1_stderr\": 0.01735834539886313,\n \"mc2\": 0.719754146671385,\n \"mc2_stderr\": 0.014988200007339842\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6885665529010239,\n \"acc_stderr\": 0.013532472099850945,\n \"acc_norm\": 0.7175767918088737,\n \"acc_norm_stderr\": 0.013155456884097222\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7110137422824139,\n \"acc_stderr\": 0.00452365118401626,\n \"acc_norm\": 0.8808006373232424,\n \"acc_norm_stderr\": 0.003233607423889983\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.04960449637488583,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.04960449637488583\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.743421052631579,\n \"acc_stderr\": 0.0355418036802569,\n \"acc_norm\": 0.743421052631579,\n \"acc_norm_stderr\": 0.0355418036802569\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.04461960433384741,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.04461960433384741\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6867924528301886,\n \"acc_stderr\": 0.028544793319055326,\n \"acc_norm\": 0.6867924528301886,\n \"acc_norm_stderr\": 0.028544793319055326\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 0.03586879280080341,\n \"acc_norm\": 0.7569444444444444,\n \"acc_norm_stderr\": 0.03586879280080341\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n 
\"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6820809248554913,\n \"acc_stderr\": 0.0355068398916558,\n \"acc_norm\": 0.6820809248554913,\n \"acc_norm_stderr\": 0.0355068398916558\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.04810840148082636,\n \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.04810840148082636\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.04461960433384739,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.04461960433384739\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6340425531914894,\n \"acc_stderr\": 0.031489558297455304,\n \"acc_norm\": 0.6340425531914894,\n \"acc_norm_stderr\": 0.031489558297455304\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.5087719298245614,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6137931034482759,\n \"acc_stderr\": 0.04057324734419035,\n \"acc_norm\": 0.6137931034482759,\n \"acc_norm_stderr\": 0.04057324734419035\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.47883597883597884,\n \"acc_stderr\": 0.025728230952130733,\n \"acc_norm\": 0.47883597883597884,\n \"acc_norm_stderr\": 0.025728230952130733\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 
0.0442626668137991,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.0442626668137991\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8129032258064516,\n \"acc_stderr\": 0.022185710092252255,\n \"acc_norm\": 0.8129032258064516,\n \"acc_norm_stderr\": 0.022185710092252255\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4975369458128079,\n \"acc_stderr\": 0.035179450386910616,\n \"acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.035179450386910616\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.806060606060606,\n \"acc_stderr\": 0.03087414513656208,\n \"acc_norm\": 0.806060606060606,\n \"acc_norm_stderr\": 0.03087414513656208\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8585858585858586,\n \"acc_stderr\": 0.02482590979334334,\n \"acc_norm\": 0.8585858585858586,\n \"acc_norm_stderr\": 0.02482590979334334\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.021995311963644244,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.021995311963644244\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6692307692307692,\n \"acc_stderr\": 0.02385479568097114,\n \"acc_norm\": 0.6692307692307692,\n \"acc_norm_stderr\": 0.02385479568097114\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37037037037037035,\n \"acc_stderr\": 0.02944316932303154,\n \"acc_norm\": 0.37037037037037035,\n \"acc_norm_stderr\": 0.02944316932303154\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6974789915966386,\n \"acc_stderr\": 0.029837962388291936,\n \"acc_norm\": 0.6974789915966386,\n \"acc_norm_stderr\": 0.029837962388291936\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3841059602649007,\n \"acc_stderr\": 0.03971301814719197,\n \"acc_norm\": 0.3841059602649007,\n \"acc_norm_stderr\": 0.03971301814719197\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8422018348623853,\n \"acc_stderr\": 0.01563002297009244,\n \"acc_norm\": 0.8422018348623853,\n \"acc_norm_stderr\": 0.01563002297009244\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5694444444444444,\n \"acc_stderr\": 0.03376922151252335,\n \"acc_norm\": 0.5694444444444444,\n \"acc_norm_stderr\": 0.03376922151252335\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8480392156862745,\n \"acc_stderr\": 0.0251956584289318,\n \"acc_norm\": 0.8480392156862745,\n \"acc_norm_stderr\": 0.0251956584289318\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8649789029535865,\n \"acc_stderr\": 0.022245776632003694,\n \"acc_norm\": 0.8649789029535865,\n \"acc_norm_stderr\": 0.022245776632003694\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.03114679648297246,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.03114679648297246\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7480916030534351,\n \"acc_stderr\": 0.03807387116306086,\n \"acc_norm\": 0.7480916030534351,\n \"acc_norm_stderr\": 0.03807387116306086\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.040191074725573483,\n 
\"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.040191074725573483\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7423312883435583,\n \"acc_stderr\": 0.03436150827846917,\n \"acc_norm\": 0.7423312883435583,\n \"acc_norm_stderr\": 0.03436150827846917\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n \"acc_stderr\": 0.04718471485219588,\n \"acc_norm\": 0.44642857142857145,\n \"acc_norm_stderr\": 0.04718471485219588\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8446601941747572,\n \"acc_stderr\": 0.03586594738573974,\n \"acc_norm\": 0.8446601941747572,\n \"acc_norm_stderr\": 0.03586594738573974\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8504273504273504,\n \"acc_stderr\": 0.02336505149175372,\n \"acc_norm\": 0.8504273504273504,\n \"acc_norm_stderr\": 0.02336505149175372\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8020434227330779,\n \"acc_stderr\": 0.014248873549217575,\n \"acc_norm\": 0.8020434227330779,\n \"acc_norm_stderr\": 0.014248873549217575\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7514450867052023,\n \"acc_stderr\": 0.023267528432100174,\n \"acc_norm\": 0.7514450867052023,\n \"acc_norm_stderr\": 0.023267528432100174\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.40893854748603353,\n \"acc_stderr\": 0.01644283065471554,\n \"acc_norm\": 0.40893854748603353,\n \"acc_norm_stderr\": 0.01644283065471554\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7647058823529411,\n \"acc_stderr\": 0.0242886194660461,\n \"acc_norm\": 0.7647058823529411,\n \"acc_norm_stderr\": 0.0242886194660461\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7170418006430869,\n \"acc_stderr\": 0.02558306248998482,\n 
\"acc_norm\": 0.7170418006430869,\n \"acc_norm_stderr\": 0.02558306248998482\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.0227797190887334,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.0227797190887334\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5177304964539007,\n \"acc_stderr\": 0.02980873964223777,\n \"acc_norm\": 0.5177304964539007,\n \"acc_norm_stderr\": 0.02980873964223777\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4863102998696219,\n \"acc_stderr\": 0.012765448722610085,\n \"acc_norm\": 0.4863102998696219,\n \"acc_norm_stderr\": 0.012765448722610085\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.026303648393696036,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.026303648393696036\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6879084967320261,\n \"acc_stderr\": 0.01874501120127766,\n \"acc_norm\": 0.6879084967320261,\n \"acc_norm_stderr\": 0.01874501120127766\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7428571428571429,\n \"acc_stderr\": 0.027979823538744546,\n \"acc_norm\": 0.7428571428571429,\n \"acc_norm_stderr\": 0.027979823538744546\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n \"acc_stderr\": 0.025538433368578337,\n \"acc_norm\": 0.845771144278607,\n \"acc_norm_stderr\": 0.025538433368578337\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.89,\n \"acc_stderr\": 0.03144660377352203,\n \"acc_norm\": 0.89,\n \"acc_norm_stderr\": 0.03144660377352203\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5843373493975904,\n \"acc_stderr\": 0.03836722176598052,\n 
\"acc_norm\": 0.5843373493975904,\n \"acc_norm_stderr\": 0.03836722176598052\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7660818713450293,\n \"acc_stderr\": 0.03246721765117826,\n \"acc_norm\": 0.7660818713450293,\n \"acc_norm_stderr\": 0.03246721765117826\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5642594859241126,\n \"mc1_stderr\": 0.01735834539886313,\n \"mc2\": 0.719754146671385,\n \"mc2_stderr\": 0.014988200007339842\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8232044198895028,\n \"acc_stderr\": 0.01072192328791874\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6103108415466262,\n \"acc_stderr\": 0.013433123236110713\n }\n}\n```", "repo_url": "https://huggingface.co/macadeliccc/SOLAR-10.7b-Instruct-dpo", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|arc:challenge|25_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|gsm8k|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hellaswag|10_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-53-40.914982.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-53-40.914982.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-53-40.914982.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-53-40.914982.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-53-40.914982.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-53-40.914982.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T05-53-40.914982.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": 
"2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": 
"2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T05-53-40.914982.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["**/details_harness|winogrande|5_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T05-53-40.914982.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T05_53_40.914982", "path": ["results_2024-01-25T05-53-40.914982.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T05-53-40.914982.parquet"]}]}]}
2024-01-25T05:56:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of macadeliccc/SOLAR-10.7b-Instruct-dpo Dataset automatically created during the evaluation run of model macadeliccc/SOLAR-10.7b-Instruct-dpo on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T05:53:40.914982(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of macadeliccc/SOLAR-10.7b-Instruct-dpo\n\n\n\nDataset automatically created during the evaluation run of model macadeliccc/SOLAR-10.7b-Instruct-dpo on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T05:53:40.914982(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of macadeliccc/SOLAR-10.7b-Instruct-dpo\n\n\n\nDataset automatically created during the evaluation run of model macadeliccc/SOLAR-10.7b-Instruct-dpo on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T05:53:40.914982(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 195, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of macadeliccc/SOLAR-10.7b-Instruct-dpo\n\n\n\nDataset automatically created during the evaluation run of model macadeliccc/SOLAR-10.7b-Instruct-dpo on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T05:53:40.914982(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
[ -0.05142812430858612, 0.20994915068149567, -0.00585571676492691, 0.013325153850018978, 0.10525461286306381, -0.01642579957842827, 0.018262913450598717, 0.09588988870382309, -0.00017397577175870538, 0.17381760478019714, -0.014986773952841759, 0.11027409881353378, 0.0935823917388916, 0.1373642086982727, 0.021708790212869644, -0.13869550824165344, 0.026141567155718803, -0.0813550129532814, 0.0722564160823822, 0.07245547324419022, 0.09053688496351242, -0.08193106949329376, 0.05700217932462692, -0.06105855852365494, -0.020363837480545044, 0.011476720683276653, -0.0915747880935669, -0.04200923442840576, 0.09792006015777588, 0.08298324048519135, 0.03628838434815407, -0.00740389060229063, 0.024404192343354225, -0.2472764402627945, 0.009584357030689716, 0.0852956771850586, -0.003085873555392027, 0.05061173439025879, 0.12746654450893402, -0.035470567643642426, 0.07917702198028564, -0.09242919087409973, 0.06419036537408829, 0.02870974875986576, -0.11015775799751282, -0.12607139348983765, -0.12671564519405365, 0.016411401331424713, 0.06904178857803345, 0.04680963605642319, -0.018784213811159134, 0.14366522431373596, -0.012844307348132133, 0.04033580794930458, 0.08716302365064621, -0.09727460145950317, -0.01785915717482567, 0.06150289252400398, 0.022088250145316124, 0.0656544417142868, -0.08360299468040466, -0.011860423721373081, 0.035720255225896835, 0.03681793063879013, 0.022283116355538368, 0.002425478771328926, -0.08004331588745117, 0.02331182360649109, -0.12223580479621887, -0.12455590069293976, 0.21984726190567017, -0.0014194650575518608, -0.04470383748412132, -0.14961615204811096, -0.03608459234237671, 0.024762803688645363, 0.008166639134287834, -0.026939615607261658, 0.015623800456523895, -0.009463882073760033, 0.06828290224075317, -0.01871037296950817, -0.1042291596531868, -0.015309750102460384, -0.013144690543413162, 0.06605804711580276, 0.016110645607113838, -0.007756909821182489, 0.012505501508712769, 0.13402977585792542, 0.006279677152633667, 
-0.09684090316295624, -0.07326291501522064, -0.037441328167915344, -0.1139182597398758, -0.04485315456986427, 0.002395087154582143, -0.05530571565032005, 0.034643445163965225, 0.2279820442199707, -0.030302437022328377, 0.031076764687895775, -0.08357304334640503, 0.0015621938509866595, 0.12963838875293732, 0.07573336362838745, -0.06085459887981415, -0.05072648078203201, -0.02053421176970005, 0.0010698427213355899, 0.04849368333816528, -0.02415201999247074, 0.0033867298625409603, 0.061661116778850555, 0.05449316278100014, 0.12329608201980591, 0.11660608649253845, 0.011439215391874313, -0.06851960718631744, -0.027560997754335403, 0.21165768802165985, -0.15460126101970673, -0.00021973495313432068, 0.011544804088771343, -0.02671189419925213, -0.08304836601018906, 0.06162673607468605, -0.005134194623678923, -0.061215370893478394, 0.09334979951381683, -0.04203915223479271, -0.07638297975063324, -0.09058557450771332, -0.039365604519844055, 0.05373026803135872, 0.00818665325641632, -0.01243429072201252, -0.07933133095502853, -0.10827579349279404, -0.0709214136004448, 0.02363060973584652, -0.08260563760995865, -0.0414152666926384, 0.04668830335140228, -0.01333048939704895, -0.026011915877461433, -0.02246668189764023, 0.13031327724456787, -0.051981423050165176, 0.025789203122258186, 0.01734774559736252, 0.011322512291371822, 0.05975395068526268, 0.045262787491083145, -0.11646953225135803, 0.08534195274114609, -0.10461597144603729, 0.0999673381447792, -0.09249397367238998, -0.0013086809776723385, -0.13549625873565674, 0.0026296896394342184, -0.027795245870947838, 0.002593905432149768, 0.013160992413759232, 0.10864036530256271, -0.24639371037483215, 0.030074048787355423, 0.09337564557790756, -0.10349183529615402, -0.08239075541496277, 0.056348901242017746, -0.03167783096432686, 0.08098937571048737, 0.036425553262233734, 0.07246487587690353, 0.06465539336204529, -0.07034654170274734, -0.11494778096675873, -0.07556319236755371, -0.011656038463115692, 0.12836100161075592, 
0.06558411568403244, -0.06899179518222809, 0.09582263231277466, 0.03679599240422249, -0.03675008937716484, -0.0832245722413063, -0.012390502728521824, -0.0628042221069336, -0.02656392566859722, -0.027752570807933807, -0.09803616255521774, 0.008331019431352615, -0.08781915158033371, -0.016417043283581734, -0.07852914184331894, 0.031710512936115265, 0.09811017662286758, -0.026685915887355804, 0.016560863703489304, -0.05992767959833145, 0.05424250662326813, -0.005782170221209526, 0.014167447574436665, -0.20764851570129395, -0.0837492123246193, 0.04925638809800148, -0.1022452786564827, 0.03125591203570366, 0.01676197722554207, 0.0028941973578184843, 0.03386818245053291, -0.0013239852851256728, 0.024060366675257683, -0.0014447879511862993, -0.0002442509576212615, -0.0028411620296537876, -0.13078409433364868, -0.05797971412539482, -0.07600167393684387, 0.10436668992042542, -0.13970163464546204, -0.014950556680560112, 0.08995231240987778, 0.15703071653842926, -0.012823192402720451, -0.08630622923374176, 0.07358864694833755, -0.0073325540870428085, -0.03853801637887955, -0.07148256152868271, 0.0007435071747750044, -0.011830369010567665, 0.038614869117736816, 0.017525959759950638, -0.19243702292442322, -0.15758512914180756, 0.07521270960569382, 0.10313090682029724, -0.06731349229812622, -0.06326740235090256, -0.04825287312269211, -0.06076376512646675, -0.08468195796012878, -0.06947899609804153, 0.06818372011184692, 0.06510356813669205, 0.021985070779919624, -0.06329891830682755, -0.06573717296123505, 0.0007961294031701982, 0.05268070846796036, -0.07781030237674713, 0.08447974175214767, 0.09461551904678345, -0.09866498410701752, 0.0911862924695015, 0.029675699770450592, 0.12216286361217499, 0.048596225678920746, 0.011947602033615112, -0.11553607881069183, -0.01514405757188797, 0.05362563207745552, 0.03702107071876526, 0.10467380285263062, -0.03735988959670067, 0.04707492142915726, 0.07834278047084808, -0.02844337932765484, 0.05229062959551811, -0.06473460048437119, 
0.035602156072854996, 0.04459698870778084, 0.007427901960909367, 0.024822695180773735, -0.006467811763286591, 0.005802610889077187, 0.046838901937007904, 0.02839338406920433, 0.0862148255109787, -0.011277751997113228, -0.04885049909353256, -0.09054555743932724, 0.15391510725021362, -0.08553687483072281, -0.25923866033554077, -0.15979672968387604, -0.00408112583681941, -0.012350931763648987, -0.006380242761224508, 0.04381139576435089, 0.0024669449776411057, -0.11188264191150665, -0.09720712900161743, 0.05292262136936188, 0.002823346061632037, -0.12061778455972672, -0.03226090222597122, 0.048725567758083344, 0.0028643780387938023, -0.15626531839370728, 0.04412310943007469, 0.04825013875961304, -0.048387277871370316, 0.007397836074233055, 0.08314048498868942, 0.14232303202152252, 0.07267923653125763, 0.03503859415650368, -0.03696959465742111, 0.00950632430613041, 0.19144634902477264, -0.11380907893180847, 0.02946726232767105, 0.12566782534122467, -0.03552524372935295, 0.05521581321954727, 0.1749916821718216, 0.01054245326668024, -0.10583265870809555, 0.037741728127002716, 0.06617281585931778, -0.06904148310422897, -0.25607410073280334, -0.09807759523391724, -0.02823244407773018, -0.025004100054502487, 0.09819578379392624, 0.06918209046125412, 0.001224161358550191, 0.04104868322610855, -0.10780410468578339, -0.026978597044944763, -0.07104485481977463, 0.08042708784341812, 0.08295299857854843, 0.004107134882360697, 0.04488593339920044, -0.0458529032766819, 0.04244188964366913, 0.10824308544397354, 0.07245761156082153, 0.1520523726940155, -0.020894477143883705, 0.15825867652893066, 0.0702417641878128, 0.09973737597465515, -0.050886351615190506, 0.059281185269355774, 0.007946688681840897, 0.06578322499990463, -0.007215545512735844, -0.09746742993593216, -0.0664076954126358, 0.08621539920568466, 0.003309220541268587, -0.0565929152071476, 0.02942820079624653, -0.06470172852277756, 0.02462770789861679, 0.1648055911064148, -0.010587343014776707, -0.14721164107322693, 
-0.06802031397819519, 0.0757773146033287, -0.017018254846334457, -0.10751383751630783, -0.02700907737016678, 0.09526704996824265, -0.14732365310192108, 0.010765318758785725, -0.023391254246234894, 0.0807032436132431, -0.1351618617773056, -0.017343806102871895, -0.018830111250281334, 0.06782771646976471, 0.0038051591254770756, 0.11903698742389679, -0.12222425639629364, 0.0984540730714798, 0.0021508883219212294, 0.03183674067258835, -0.09211082011461258, 0.06918937712907791, -0.05710645020008087, -0.017082972452044487, 0.13408835232257843, -0.007609029766172171, -0.12340035289525986, -0.052173711359500885, -0.1169629693031311, 0.014512143097817898, 0.054632484912872314, -0.12078414857387543, 0.09816430509090424, 0.029235946014523506, -0.004693326540291309, -0.020699195563793182, -0.025695910677313805, -0.12417381256818771, -0.2321925014257431, 0.10792036354541779, -0.1070733368396759, 0.07745020091533661, -0.05382396653294563, -0.034865908324718475, -0.055870313197374344, 0.1710604876279831, -0.10026900470256805, -0.070417620241642, -0.11387438327074051, 0.017433959990739822, 0.1910155564546585, -0.046556826680898666, 0.04867829009890556, -0.04428776353597641, 0.16022783517837524, 0.008027978241443634, -0.05847660079598427, -0.0005998671986162663, -0.0933457612991333, -0.14096350967884064, -0.034349121153354645, 0.12370089441537857, 0.040415890514850616, 0.010515131056308746, 0.008695272728800774, 0.04990515857934952, -0.014914254657924175, -0.09365426003932953, 0.03764405846595764, 0.0726006031036377, 0.11124669760465622, 0.04420524463057518, -0.03671671822667122, -0.10881521552801132, -0.09108206629753113, -0.10596910119056702, 0.05828646197915077, 0.1403225064277649, -0.05861395224928856, 0.1335725039243698, 0.1316705197095871, -0.0940147116780281, -0.1803688108921051, -0.05356993153691292, -0.000015124174751690589, -0.011776435188949108, 0.10727743059396744, -0.2213015854358673, 0.05288174748420715, 0.06764281541109085, -0.006368075031787157, 0.06631876528263092, 
-0.2491571605205536, -0.12749063968658447, 0.04355739802122116, 0.030733417719602585, -0.22297564148902893, -0.17598609626293182, -0.12315445393323898, -0.025396356359124184, -0.18591426312923431, 0.13991181552410126, -0.006168516352772713, 0.02881292626261711, -0.007301848381757736, 0.0793234333395958, 0.05827958881855011, -0.05136857554316521, 0.15862029790878296, -0.0013574501499533653, 0.00719241751357913, -0.09549614787101746, -0.016052788123488426, 0.024060212075710297, -0.03333541750907898, 0.0974104106426239, 0.05862921103835106, 0.05542926490306854, -0.102228544652462, -0.02482532151043415, -0.039896368980407715, 0.04866543039679527, -0.07710810750722885, -0.0654972568154335, -0.057814158499240875, 0.07220298796892166, 0.07570497691631317, -0.0250144824385643, 0.04687556251883507, -0.021478382870554924, 0.038377709686756134, 0.21915383636951447, 0.09730394929647446, 0.05917761102318764, -0.09651532769203186, -0.01157030276954174, -0.009867946617305279, 0.011661696247756481, -0.13871780037879944, 0.03885442018508911, 0.07647296786308289, 0.042415253818035126, 0.05447946861386299, -0.021066373214125633, -0.18572930991649628, -0.003954474348574877, 0.07175444066524506, -0.10769644379615784, -0.22619125247001648, 0.03264065086841583, 0.1504712551832199, -0.1227002665400505, -0.030955016613006592, 0.07982143759727478, 0.012203264981508255, -0.03299649432301521, 0.0043153949081897736, 0.06963559240102768, 0.05475882068276405, 0.09658573567867279, 0.007996036671102047, 0.051093194633722305, -0.0827980488538742, 0.1142113208770752, 0.15506576001644135, -0.10750224441289902, 0.015864092856645584, 0.048160433769226074, -0.046540647745132446, -0.05312415957450867, 0.02037927508354187, -0.02868594229221344, -0.019171755760908127, -0.05790703743696213, 0.017259564250707626, -0.028081415221095085, 0.04699945077300072, 0.07355131208896637, 0.008489081636071205, 0.037323083728551865, 0.023223213851451874, -0.011575616896152496, -0.08423516154289246, 0.0760168731212616, 
0.012562827207148075, 0.0458548367023468, -0.03910347446799278, 0.020884914323687553, 0.0060126036405563354, -0.003559087635949254, 0.009742013178765774, -0.046742431819438934, -0.042500369250774384, -0.007260575890541077, -0.12742231786251068, 0.011594927869737148, -0.08059551566839218, -0.009135597385466099, -0.0000011387686527086771, -0.012713532894849777, -0.014498816803097725, 0.028783168643712997, -0.04414194077253342, -0.059126779437065125, -0.060130525380373, 0.11219696700572968, -0.18904852867126465, 0.0076903002336621284, 0.09650600701570511, -0.08644139021635056, 0.08098451793193817, 0.004552002530544996, -0.0007913373410701752, -0.008871224708855152, -0.10927137732505798, -0.01845325343310833, -0.018559345975518227, 0.05885057896375656, 0.01045110821723938, -0.16230228543281555, -0.017748404294252396, 0.013920649886131287, -0.08058762550354004, -0.023999769240617752, 0.07473422586917877, -0.1466301530599594, 0.01830768585205078, 0.05118674784898758, -0.03966283053159714, -0.04530727490782738, 0.057532910257577896, 0.03276935592293739, 0.006987695582211018, 0.0810001865029335, -0.00008959040133049712, 0.027596665546298027, -0.1446041464805603, -0.05021258443593979, -0.003221416613087058, 0.016459232196211815, 0.004037767183035612, 0.02619791403412819, 0.04472016543149948, 0.008109851740300655, 0.20459699630737305, -0.03604826703667641, 0.09162917733192444, 0.03235241770744324, 0.0393446646630764, -0.03352700546383858, 0.025129104033112526, 0.06382858008146286, 0.025668209418654442, 0.03700720891356468, 0.029328593984246254, -0.029467005282640457, -0.04125252366065979, -0.018447360023856163, 0.05359668657183647, 0.15507446229457855, 0.13798180222511292, -0.02701237052679062, 0.06418731808662415, -0.1640462577342987, -0.05263108015060425, 0.04751249775290489, -0.031332626938819885, 0.037228088825941086, -0.07180308550596237, 0.04525548219680786, 0.06912771612405777, -0.12016177922487259, 0.13057631254196167, -0.06071378290653229, -0.041875313967466354, 
-0.0270136296749115, -0.12191656976938248, -0.03332926705479622, 0.012631869874894619, 0.011858121491968632, -0.09718576073646545, 0.09860553592443466, 0.13987717032432556, -0.02545706368982792, -0.016354819759726524, 0.09338221698999405, -0.04340687766671181, -0.06871505826711655, -0.04099548980593681, 0.017051858827471733, 0.04033534973859787, -0.01971949078142643, 0.07471521943807602, 0.01577819511294365, 0.08212210237979889, 0.0702437087893486, 0.09686382114887238, 0.0904649943113327, 0.023302648216485977, -0.034573156386613846, -0.05198576673865318, 0.0019153921166434884, -0.016108309850096703, -0.050874847918748856, 0.1899290829896927, 0.04175638407468796, 0.03233931213617325, 0.013062004931271076, 0.21479342877864838, -0.011353561654686928, -0.0649605467915535, -0.1261647641658783, 0.08544468134641647, -0.016728410497307777, 0.030597107484936714, 0.054123882204294205, -0.14114004373550415, 0.012010987848043442, 0.15549778938293457, 0.07097530364990234, 0.0307613518089056, 0.00791893433779478, 0.04294842109084129, 0.021600665524601936, -0.04214311018586159, 0.04990491271018982, 0.045589838176965714, 0.16714948415756226, -0.060885559767484665, 0.06300602853298187, -0.01028373558074236, -0.014584552496671677, -0.05170854181051254, 0.07762406766414642, -0.06851305812597275, 0.0026047893334180117, -0.03753850981593132, 0.10965819656848907, -0.0210855882614851, -0.2919286787509918, 0.000740090967155993, -0.09352439641952515, -0.1293584406375885, -0.029529593884944916, 0.06237271800637245, -0.03994561359286308, 0.019286418333649635, 0.0272316075861454, -0.007296357303857803, 0.20612967014312744, 0.03200916200876236, -0.07473083585500717, -0.038805726915597916, 0.07715819031000137, -0.03348566219210625, 0.2429009974002838, -0.0012622284702956676, 0.07662133872509003, 0.08657866716384888, -0.01630757749080658, -0.17316757142543793, 0.026833824813365936, 0.116050124168396, -0.04332821071147919, 0.07451736927032471, 0.16058464348316193, -0.017161022871732712, 
0.12909795343875885, 0.04622882232069969, -0.0005701602785848081, 0.04070520028471947, 0.08515073359012604, 0.05701669305562973, -0.08823070675134659, 0.05645539611577988, -0.09588492661714554, 0.13102932274341583, 0.09585896879434586, -0.03185205161571503, -0.006782965734601021, -0.07718542218208313, 0.06368893384933472, -0.021708576008677483, 0.1223425418138504, -0.004784836899489164, -0.12991708517074585, 0.03417340666055679, 0.007076776586472988, 0.07722750306129456, -0.19542063772678375, -0.05965325981378555, 0.11643968522548676, -0.048751749098300934, -0.014192693866789341, 0.09647807478904724, 0.039339691400527954, 0.007869717665016651, -0.06581903994083405, -0.09470386058092117, -0.012417193502187729, 0.10680574923753738, -0.08897477388381958, -0.028522176668047905 ]
0d870c5b11200fdb7da12ddfb197bf57f2ebaba3
# Dataset Card for Evaluation run of neovalle/H4rmoniousAnthea <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [neovalle/H4rmoniousAnthea](https://huggingface.co/neovalle/H4rmoniousAnthea) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_neovalle__H4rmoniousAnthea", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T05:56:18.084977](https://huggingface.co/datasets/open-llm-leaderboard/details_neovalle__H4rmoniousAnthea/blob/main/results_2024-01-25T05-56-18.084977.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.630582687926132, "acc_stderr": 0.03217475637386003, "acc_norm": 0.6405543770437054, "acc_norm_stderr": 0.03288367770300838, "mc1": 0.3708690330477356, "mc1_stderr": 0.016909693580248818, "mc2": 0.5507958189879629, "mc2_stderr": 0.015408052923903376 }, "harness|arc:challenge|25": { "acc": 0.621160409556314, "acc_stderr": 0.014175915490000324, "acc_norm": 0.658703071672355, "acc_norm_stderr": 0.013855831287497731 }, "harness|hellaswag|10": { "acc": 0.6528579964150567, "acc_stderr": 0.00475088440109516, "acc_norm": 0.8408683529177454, "acc_norm_stderr": 0.003650512158306266 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5851851851851851, "acc_stderr": 0.04256193767901408, "acc_norm": 0.5851851851851851, "acc_norm_stderr": 0.04256193767901408 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6776315789473685, "acc_stderr": 0.03803510248351585, "acc_norm": 0.6776315789473685, "acc_norm_stderr": 0.03803510248351585 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6867924528301886, "acc_stderr": 0.028544793319055326, "acc_norm": 0.6867924528301886, "acc_norm_stderr": 0.028544793319055326 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7430555555555556, "acc_stderr": 0.03653946969442099, "acc_norm": 0.7430555555555556, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 
0.050161355804659205 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621505, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6184971098265896, "acc_stderr": 0.03703851193099521, "acc_norm": 0.6184971098265896, "acc_norm_stderr": 0.03703851193099521 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107224, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107224 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.042295258468165065, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.548936170212766, "acc_stderr": 0.032529096196131965, "acc_norm": 0.548936170212766, "acc_norm_stderr": 0.032529096196131965 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.47368421052631576, "acc_stderr": 0.04697085136647863, "acc_norm": 0.47368421052631576, "acc_norm_stderr": 0.04697085136647863 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5448275862068965, "acc_stderr": 0.04149886942192117, "acc_norm": 0.5448275862068965, "acc_norm_stderr": 0.04149886942192117 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41534391534391535, "acc_stderr": 0.025379524910778398, "acc_norm": 0.41534391534391535, "acc_norm_stderr": 0.025379524910778398 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7967741935483871, "acc_stderr": 0.02289168798455495, "acc_norm": 0.7967741935483871, "acc_norm_stderr": 0.02289168798455495 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.035179450386910616, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.035179450386910616 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.67, "acc_stderr": 0.04725815626252607, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252607 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7878787878787878, "acc_stderr": 0.03192271569548301, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.03192271569548301 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.803030303030303, "acc_stderr": 0.028335609732463362, "acc_norm": 0.803030303030303, "acc_norm_stderr": 0.028335609732463362 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8860103626943006, "acc_stderr": 0.022935144053919443, "acc_norm": 0.8860103626943006, "acc_norm_stderr": 0.022935144053919443 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6230769230769231, "acc_stderr": 0.024570975364225995, "acc_norm": 0.6230769230769231, "acc_norm_stderr": 0.024570975364225995 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3074074074074074, "acc_stderr": 0.02813325257881564, "acc_norm": 0.3074074074074074, "acc_norm_stderr": 0.02813325257881564 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6722689075630253, "acc_stderr": 0.03048991141767323, "acc_norm": 0.6722689075630253, "acc_norm_stderr": 0.03048991141767323 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8293577981651377, "acc_stderr": 0.01612927102509986, "acc_norm": 0.8293577981651377, "acc_norm_stderr": 0.01612927102509986 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5, "acc_stderr": 0.034099716973523674, 
"acc_norm": 0.5, "acc_norm_stderr": 0.034099716973523674 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8186274509803921, "acc_stderr": 0.027044621719474086, "acc_norm": 0.8186274509803921, "acc_norm_stderr": 0.027044621719474086 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8016877637130801, "acc_stderr": 0.02595502084162113, "acc_norm": 0.8016877637130801, "acc_norm_stderr": 0.02595502084162113 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7938931297709924, "acc_stderr": 0.03547771004159465, "acc_norm": 0.7938931297709924, "acc_norm_stderr": 0.03547771004159465 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8148148148148148, "acc_stderr": 0.03755265865037181, "acc_norm": 0.8148148148148148, "acc_norm_stderr": 0.03755265865037181 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7914110429447853, "acc_stderr": 0.031921934489347235, "acc_norm": 0.7914110429447853, "acc_norm_stderr": 0.031921934489347235 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.48214285714285715, "acc_stderr": 0.047427623612430116, "acc_norm": 0.48214285714285715, "acc_norm_stderr": 0.047427623612430116 }, "harness|hendrycksTest-management|5": { "acc": 0.7961165048543689, "acc_stderr": 0.039891398595317706, "acc_norm": 0.7961165048543689, "acc_norm_stderr": 0.039891398595317706 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8589743589743589, "acc_stderr": 0.022801382534597528, "acc_norm": 0.8589743589743589, "acc_norm_stderr": 0.022801382534597528 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, 
"acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8212005108556832, "acc_stderr": 0.013702643715368985, "acc_norm": 0.8212005108556832, "acc_norm_stderr": 0.013702643715368985 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7138728323699421, "acc_stderr": 0.02433214677913413, "acc_norm": 0.7138728323699421, "acc_norm_stderr": 0.02433214677913413 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3106145251396648, "acc_stderr": 0.015476515438005566, "acc_norm": 0.3106145251396648, "acc_norm_stderr": 0.015476515438005566 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.761437908496732, "acc_stderr": 0.02440439492808787, "acc_norm": 0.761437908496732, "acc_norm_stderr": 0.02440439492808787 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7106109324758842, "acc_stderr": 0.025755865922632945, "acc_norm": 0.7106109324758842, "acc_norm_stderr": 0.025755865922632945 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7376543209876543, "acc_stderr": 0.024477222856135114, "acc_norm": 0.7376543209876543, "acc_norm_stderr": 0.024477222856135114 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5, "acc_stderr": 0.029827499313594685, "acc_norm": 0.5, "acc_norm_stderr": 0.029827499313594685 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4621903520208605, "acc_stderr": 0.012733671880342506, "acc_norm": 0.4621903520208605, "acc_norm_stderr": 0.012733671880342506 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6838235294117647, "acc_stderr": 0.028245687391462937, "acc_norm": 0.6838235294117647, "acc_norm_stderr": 0.028245687391462937 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6584967320261438, "acc_stderr": 0.019184639328092487, "acc_norm": 0.6584967320261438, "acc_norm_stderr": 0.019184639328092487 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.045820048415054174, "acc_norm": 0.6454545454545455, 
"acc_norm_stderr": 0.045820048415054174 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8109452736318408, "acc_stderr": 0.02768691358801301, "acc_norm": 0.8109452736318408, "acc_norm_stderr": 0.02768691358801301 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.03588702812826371, "acc_norm": 0.85, "acc_norm_stderr": 0.03588702812826371 }, "harness|hendrycksTest-virology|5": { "acc": 0.5602409638554217, "acc_stderr": 0.03864139923699122, "acc_norm": 0.5602409638554217, "acc_norm_stderr": 0.03864139923699122 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8421052631578947, "acc_stderr": 0.02796678585916089, "acc_norm": 0.8421052631578947, "acc_norm_stderr": 0.02796678585916089 }, "harness|truthfulqa:mc|0": { "mc1": 0.3708690330477356, "mc1_stderr": 0.016909693580248818, "mc2": 0.5507958189879629, "mc2_stderr": 0.015408052923903376 }, "harness|winogrande|5": { "acc": 0.7687450670876085, "acc_stderr": 0.011850040124850508 }, "harness|gsm8k|5": { "acc": 0.12964366944655042, "acc_stderr": 0.009252657757825552 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_neovalle__H4rmoniousAnthea
[ "region:us" ]
2024-01-25T05:58:37+00:00
{"pretty_name": "Evaluation run of neovalle/H4rmoniousAnthea", "dataset_summary": "Dataset automatically created during the evaluation run of model [neovalle/H4rmoniousAnthea](https://huggingface.co/neovalle/H4rmoniousAnthea) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_neovalle__H4rmoniousAnthea\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T05:56:18.084977](https://huggingface.co/datasets/open-llm-leaderboard/details_neovalle__H4rmoniousAnthea/blob/main/results_2024-01-25T05-56-18.084977.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.630582687926132,\n \"acc_stderr\": 0.03217475637386003,\n \"acc_norm\": 0.6405543770437054,\n \"acc_norm_stderr\": 0.03288367770300838,\n \"mc1\": 0.3708690330477356,\n \"mc1_stderr\": 0.016909693580248818,\n \"mc2\": 0.5507958189879629,\n \"mc2_stderr\": 0.015408052923903376\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.621160409556314,\n \"acc_stderr\": 0.014175915490000324,\n \"acc_norm\": 0.658703071672355,\n \"acc_norm_stderr\": 0.013855831287497731\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6528579964150567,\n \"acc_stderr\": 0.00475088440109516,\n \"acc_norm\": 0.8408683529177454,\n \"acc_norm_stderr\": 0.003650512158306266\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5851851851851851,\n \"acc_stderr\": 0.04256193767901408,\n \"acc_norm\": 0.5851851851851851,\n \"acc_norm_stderr\": 0.04256193767901408\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6776315789473685,\n \"acc_stderr\": 0.03803510248351585,\n \"acc_norm\": 0.6776315789473685,\n \"acc_norm_stderr\": 0.03803510248351585\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6867924528301886,\n \"acc_stderr\": 0.028544793319055326,\n \"acc_norm\": 0.6867924528301886,\n \"acc_norm_stderr\": 0.028544793319055326\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7430555555555556,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.7430555555555556,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n 
\"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6184971098265896,\n \"acc_stderr\": 0.03703851193099521,\n \"acc_norm\": 0.6184971098265896,\n \"acc_norm_stderr\": 0.03703851193099521\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107224,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107224\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.548936170212766,\n \"acc_stderr\": 0.032529096196131965,\n \"acc_norm\": 0.548936170212766,\n \"acc_norm_stderr\": 0.032529096196131965\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.47368421052631576,\n \"acc_stderr\": 0.04697085136647863,\n \"acc_norm\": 0.47368421052631576,\n \"acc_norm_stderr\": 0.04697085136647863\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41534391534391535,\n \"acc_stderr\": 0.025379524910778398,\n \"acc_norm\": 0.41534391534391535,\n \"acc_norm_stderr\": 0.025379524910778398\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 
0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7967741935483871,\n \"acc_stderr\": 0.02289168798455495,\n \"acc_norm\": 0.7967741935483871,\n \"acc_norm_stderr\": 0.02289168798455495\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.035179450386910616,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.035179450386910616\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252607,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252607\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.03192271569548301,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.03192271569548301\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.803030303030303,\n \"acc_stderr\": 0.028335609732463362,\n \"acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.028335609732463362\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8860103626943006,\n \"acc_stderr\": 0.022935144053919443,\n \"acc_norm\": 0.8860103626943006,\n \"acc_norm_stderr\": 0.022935144053919443\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6230769230769231,\n \"acc_stderr\": 0.024570975364225995,\n \"acc_norm\": 0.6230769230769231,\n \"acc_norm_stderr\": 0.024570975364225995\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3074074074074074,\n \"acc_stderr\": 0.02813325257881564,\n \"acc_norm\": 0.3074074074074074,\n \"acc_norm_stderr\": 0.02813325257881564\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6722689075630253,\n \"acc_stderr\": 0.03048991141767323,\n \"acc_norm\": 0.6722689075630253,\n \"acc_norm_stderr\": 0.03048991141767323\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8293577981651377,\n \"acc_stderr\": 0.01612927102509986,\n \"acc_norm\": 0.8293577981651377,\n \"acc_norm_stderr\": 0.01612927102509986\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.034099716973523674,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.034099716973523674\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8186274509803921,\n \"acc_stderr\": 0.027044621719474086,\n \"acc_norm\": 0.8186274509803921,\n \"acc_norm_stderr\": 0.027044621719474086\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8016877637130801,\n \"acc_stderr\": 0.02595502084162113,\n \"acc_norm\": 0.8016877637130801,\n \"acc_norm_stderr\": 0.02595502084162113\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7938931297709924,\n \"acc_stderr\": 0.03547771004159465,\n \"acc_norm\": 0.7938931297709924,\n \"acc_norm_stderr\": 0.03547771004159465\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8148148148148148,\n \"acc_stderr\": 0.03755265865037181,\n \"acc_norm\": 
0.8148148148148148,\n \"acc_norm_stderr\": 0.03755265865037181\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7914110429447853,\n \"acc_stderr\": 0.031921934489347235,\n \"acc_norm\": 0.7914110429447853,\n \"acc_norm_stderr\": 0.031921934489347235\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.48214285714285715,\n \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.48214285714285715,\n \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.039891398595317706,\n \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.039891398595317706\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n \"acc_stderr\": 0.022801382534597528,\n \"acc_norm\": 0.8589743589743589,\n \"acc_norm_stderr\": 0.022801382534597528\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8212005108556832,\n \"acc_stderr\": 0.013702643715368985,\n \"acc_norm\": 0.8212005108556832,\n \"acc_norm_stderr\": 0.013702643715368985\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7138728323699421,\n \"acc_stderr\": 0.02433214677913413,\n \"acc_norm\": 0.7138728323699421,\n \"acc_norm_stderr\": 0.02433214677913413\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3106145251396648,\n \"acc_stderr\": 0.015476515438005566,\n \"acc_norm\": 0.3106145251396648,\n \"acc_norm_stderr\": 0.015476515438005566\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.761437908496732,\n \"acc_stderr\": 0.02440439492808787,\n \"acc_norm\": 0.761437908496732,\n \"acc_norm_stderr\": 0.02440439492808787\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n \"acc_stderr\": 0.025755865922632945,\n \"acc_norm\": 
0.7106109324758842,\n \"acc_norm_stderr\": 0.025755865922632945\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7376543209876543,\n \"acc_stderr\": 0.024477222856135114,\n \"acc_norm\": 0.7376543209876543,\n \"acc_norm_stderr\": 0.024477222856135114\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.029827499313594685,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.029827499313594685\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4621903520208605,\n \"acc_stderr\": 0.012733671880342506,\n \"acc_norm\": 0.4621903520208605,\n \"acc_norm_stderr\": 0.012733671880342506\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6838235294117647,\n \"acc_stderr\": 0.028245687391462937,\n \"acc_norm\": 0.6838235294117647,\n \"acc_norm_stderr\": 0.028245687391462937\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6584967320261438,\n \"acc_stderr\": 0.019184639328092487,\n \"acc_norm\": 0.6584967320261438,\n \"acc_norm_stderr\": 0.019184639328092487\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8109452736318408,\n \"acc_stderr\": 0.02768691358801301,\n \"acc_norm\": 0.8109452736318408,\n \"acc_norm_stderr\": 0.02768691358801301\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.03588702812826371,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.03588702812826371\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n \"acc_stderr\": 0.03864139923699122,\n \"acc_norm\": 
0.5602409638554217,\n \"acc_norm_stderr\": 0.03864139923699122\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8421052631578947,\n \"acc_stderr\": 0.02796678585916089,\n \"acc_norm\": 0.8421052631578947,\n \"acc_norm_stderr\": 0.02796678585916089\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3708690330477356,\n \"mc1_stderr\": 0.016909693580248818,\n \"mc2\": 0.5507958189879629,\n \"mc2_stderr\": 0.015408052923903376\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7687450670876085,\n \"acc_stderr\": 0.011850040124850508\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.12964366944655042,\n \"acc_stderr\": 0.009252657757825552\n }\n}\n```", "repo_url": "https://huggingface.co/neovalle/H4rmoniousAnthea", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|arc:challenge|25_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|gsm8k|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hellaswag|10_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-56-18.084977.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-56-18.084977.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-56-18.084977.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-56-18.084977.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-56-18.084977.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-56-18.084977.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T05-56-18.084977.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T05-56-18.084977.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["**/details_harness|winogrande|5_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T05-56-18.084977.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T05_56_18.084977", "path": ["results_2024-01-25T05-56-18.084977.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T05-56-18.084977.parquet"]}]}]}
2024-01-25T05:58:59+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of neovalle/H4rmoniousAnthea Dataset automatically created during the evaluation run of model neovalle/H4rmoniousAnthea on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T05:56:18.084977(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of neovalle/H4rmoniousAnthea\n\n\n\nDataset automatically created during the evaluation run of model neovalle/H4rmoniousAnthea on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T05:56:18.084977(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of neovalle/H4rmoniousAnthea\n\n\n\nDataset automatically created during the evaluation run of model neovalle/H4rmoniousAnthea on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T05:56:18.084977(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 181, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of neovalle/H4rmoniousAnthea\n\n\n\nDataset automatically created during the evaluation run of model neovalle/H4rmoniousAnthea on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T05:56:18.084977(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.04474504292011261, 0.2069152295589447, -0.0058732847683131695, 0.04726404324173927, 0.06679341942071915, -0.01764860190451145, 0.03304264321923256, 0.11187638342380524, 0.016523798927664757, 0.18389135599136353, -0.02196955867111683, 0.09383922815322876, 0.07871711254119873, 0.109660804271698, 0.030907249078154564, -0.14065343141555786, 0.02902369387447834, -0.09398583322763443, 0.1161007210612297, 0.05929063633084297, 0.05766461417078972, -0.0866495668888092, 0.06809722632169724, -0.02636369690299034, 0.04801437258720398, 0.00023864138347562402, -0.07002270221710205, -0.025399457663297653, 0.10922359675168991, 0.1032678484916687, 0.03879309818148613, -0.01502430159598589, 0.02962925285100937, -0.2627907991409302, 0.019979247823357582, 0.10078028589487076, -0.01351997721940279, 0.03623557463288307, 0.14806346595287323, -0.09156212210655212, 0.09583677351474762, -0.03420580178499222, 0.06983818113803864, 0.05487659573554993, -0.1063353419303894, -0.1642150580883026, -0.15823304653167725, 0.0003816351236309856, 0.06775085628032684, 0.04612309858202934, -0.024110563099384308, 0.13442768156528473, -0.05832548439502716, 0.04797516018152237, 0.14752380549907684, -0.13550755381584167, -0.019961319863796234, 0.03937685862183571, 0.012405069544911385, 0.09386017173528671, -0.07230575382709503, -0.017469298094511032, 0.03196217119693756, 0.06032715365290642, 0.00304664159193635, 0.012105684727430344, 0.008097480982542038, 0.020581604912877083, -0.15106885135173798, -0.1188245490193367, 0.10734528303146362, 0.004422004334628582, -0.04111041873693466, -0.16332127153873444, -0.03357213735580444, 0.02285650186240673, -0.0017879980150610209, 0.017799926921725273, 0.0012534518027678132, -0.022523434832692146, 0.09694138169288635, -0.019732924178242683, -0.09042272716760635, -0.026728520169854164, -0.01637185923755169, 0.0360197052359581, 0.036208782345056534, 0.0005859847296960652, 0.012476892210543156, 0.11746697127819061, 0.033205870538949966, -0.047806717455387115, 
-0.07501906901597977, -0.04861227795481682, -0.10868913680315018, -0.0287197083234787, 0.010626218281686306, -0.07868331670761108, 0.04737862944602966, 0.20258840918540955, -0.02203086018562317, 0.022839808836579323, -0.1173338070511818, 0.017298834398388863, 0.11286431550979614, 0.0518699549138546, -0.09054136276245117, -0.07468890398740768, -0.03339029476046562, 0.020152268931269646, 0.0299594234675169, -0.015849394723773003, 0.010504514910280704, 0.07102534174919128, 0.01580153778195381, 0.1282106190919876, 0.1194944679737091, 0.01785282976925373, -0.06162257492542267, -0.017508534714579582, 0.18906381726264954, -0.15701299905776978, -0.008389485068619251, 0.02545970119535923, -0.039184678345918655, -0.09775396436452866, 0.05838307365775108, -0.015357010066509247, -0.06713224947452545, 0.11395084112882614, -0.048598989844322205, -0.07992497831583023, -0.08753828704357147, -0.0733361691236496, 0.03972921893000603, -0.00446715485304594, -0.06529222428798676, -0.07190956920385361, -0.1207801103591919, -0.08280106633901596, 0.029742509126663208, -0.06939931213855743, -0.006020202301442623, 0.0034906305372714996, 0.01853136159479618, -0.011747041717171669, -0.014798691496253014, 0.11182522028684616, -0.0720660462975502, 0.02975912019610405, -0.04563172534108162, 0.03429943323135376, 0.09851657599210739, 0.030711278319358826, -0.10207166522741318, 0.08235134184360504, -0.10538072884082794, 0.09911755472421646, -0.1113717183470726, -0.02457522600889206, -0.120323546230793, 0.001385209383442998, -0.017811579629778862, 0.04000484198331833, -0.030258478596806526, 0.0801612064242363, -0.21008530259132385, -0.012316237203776836, 0.19610801339149475, -0.11566182971000671, -0.06449292600154877, 0.10252281278371811, -0.04140108823776245, 0.04155704751610756, 0.04851280897855759, 0.09170743077993393, 0.10218290984630585, -0.07990453392267227, -0.10201650112867355, -0.05476308986544609, -0.0282101072371006, 0.14148180186748505, 0.06347159296274185, -0.09236034005880356, 
0.10162179917097092, 0.03370143473148346, 0.007219347637146711, -0.06286847591400146, -0.004303552210330963, -0.06234615296125412, -0.010634172707796097, -0.059397000819444656, -0.0679047480225563, -0.021747194230556488, -0.07143969088792801, -0.010722891427576542, -0.06658171862363815, -0.008155298419296741, 0.10337259620428085, -0.026325281709432602, 0.029542529955506325, -0.07705726474523544, 0.05767223611474037, 0.01671169139444828, 0.014189878478646278, -0.2066764533519745, -0.08240251243114471, 0.032581228762865067, -0.20595179498195648, 0.05112108215689659, 0.03264302760362625, 0.014472631737589836, 0.06605599075555801, -0.009712311439216137, 0.021845243871212006, 0.032753653824329376, -0.011426642537117004, -0.003345234552398324, -0.15407344698905945, -0.03808199614286423, -0.08056158572435379, 0.08643318712711334, -0.11939600110054016, -0.021281329914927483, 0.06754802167415619, 0.15870888531208038, 0.022556154057383537, -0.06786685436964035, 0.044599149376153946, 0.012184321880340576, -0.040022384375333786, -0.05104825645685196, -0.0005803400417789817, -0.02628304623067379, 0.03686261177062988, 0.05716176703572273, -0.17470353841781616, -0.11835926026105881, 0.0720987617969513, 0.13584335148334503, -0.05248907208442688, -0.07237153500318527, -0.06881940364837646, -0.05675876513123512, -0.09051252901554108, -0.06908567249774933, 0.0867922306060791, 0.0874931588768959, 0.054708752781152725, -0.07889831066131592, -0.056681711226701736, 0.005159230902791023, 0.051944196224212646, -0.06898863613605499, 0.11118141561746597, 0.07768706977367401, -0.09143176674842834, 0.10144777595996857, -0.04495116323232651, 0.08819908648729324, 0.10776788741350174, 0.0222773514688015, -0.1113928034901619, 0.007183634676039219, 0.05981817469000816, 0.05298902466893196, 0.06918915361166, -0.005177531857043505, 0.03535616770386696, 0.0851474404335022, -0.004383477382361889, 0.04559788480401039, -0.06870358437299728, 0.03301925212144852, 0.019010407850146294, 
0.00048189720837399364, 0.03442711755633354, 0.010130545124411583, 0.027349554002285004, 0.0972335934638977, 0.01810433715581894, 0.07702436298131943, -0.042504217475652695, -0.0610814243555069, -0.0979682058095932, 0.1415855586528778, -0.09604590386152267, -0.24177873134613037, -0.16146357357501984, -0.048622071743011475, -0.03986310213804245, -0.01635613478720188, 0.05749678984284401, -0.002293802099302411, -0.10570995509624481, -0.12627528607845306, 0.04756658151745796, 0.04006950929760933, -0.1302657127380371, -0.05186382308602333, 0.03861190006136894, -0.01874643564224243, -0.16833922266960144, 0.033811524510383606, 0.04076831787824631, -0.06789681315422058, 0.01049923337996006, 0.06779960542917252, 0.10568199306726456, 0.09795961529016495, 0.08931221812963486, -0.01838310994207859, -0.02011314034461975, 0.15324395895004272, -0.10950460284948349, 0.03597710281610489, 0.08626838028430939, -0.028965145349502563, 0.07949767261743546, 0.14284537732601166, 0.007039621472358704, -0.07946109026670456, 0.052514076232910156, 0.10861825942993164, -0.05858209356665611, -0.2501284182071686, -0.11497630178928375, -0.031324949115514755, 0.03604297712445259, 0.09898386895656586, 0.07313714176416397, 0.010923697613179684, -0.00536432396620512, -0.11995139718055725, -0.035249605774879456, -0.03415321931242943, 0.057240843772888184, 0.043581243604421616, -0.01002498622983694, 0.04483923688530922, -0.04720349237322807, 0.01860732026398182, 0.13173425197601318, 0.03143112733960152, 0.17386780679225922, -0.046786997467279434, 0.18615366518497467, 0.09673504531383514, 0.07322020083665848, -0.03491721302270889, 0.04872921481728554, -0.019247977063059807, 0.07211773097515106, -0.02179597318172455, -0.10431559383869171, -0.020484302192926407, 0.10392686724662781, 0.04693372920155525, -0.06098950654268265, 0.052695825695991516, -0.07156597077846527, 0.049288198351860046, 0.24040570855140686, -0.01541829202324152, -0.1205817386507988, -0.030837563797831535, 0.06298283487558365, 
-0.04382039234042168, -0.08885790407657623, 0.005110660567879677, 0.08526051044464111, -0.1526433527469635, 0.016653763130307198, -0.03908483684062958, 0.07184523344039917, -0.12866553664207458, -0.027343591675162315, -0.04342282563447952, 0.04185067117214203, -0.015200169757008553, 0.10664177685976028, -0.13549059629440308, 0.09669223427772522, -0.009365363977849483, 0.015435815788805485, -0.07870098948478699, 0.05709743872284889, -0.013749102130532265, -0.052755944430828094, 0.14572931826114655, -0.0025699601974338293, -0.10260003805160522, -0.03909524157643318, -0.11329708248376846, -0.011455981060862541, 0.04735270142555237, -0.11003448814153671, 0.10715768486261368, 0.020333223044872284, -0.033545635640621185, -0.043320003896951675, -0.010514680296182632, -0.08866178244352341, -0.23735995590686798, 0.09849460422992706, -0.13600978255271912, 0.030112233012914658, -0.06352546811103821, -0.05017530918121338, -0.054474782198667526, 0.11369839310646057, -0.11500535905361176, -0.059515733271837234, -0.1084156408905983, -0.03097079135477543, 0.16175353527069092, -0.0643121749162674, 0.06387215852737427, -0.04183817654848099, 0.1718175858259201, -0.043685950338840485, -0.051660630851984024, 0.004627101123332977, -0.08072613924741745, -0.17856645584106445, -0.046876225620508194, 0.11286094039678574, 0.07479771971702576, 0.018180040642619133, -0.009122025221586227, 0.03552054986357689, 0.014212341047823429, -0.09126143902540207, 0.03623879700899124, 0.1373666226863861, 0.1409476399421692, 0.05123485252261162, -0.029515426605939865, -0.10527914762496948, -0.10213149338960648, -0.10542522370815277, 0.06730923056602478, 0.18593546748161316, -0.061913326382637024, 0.1571791023015976, 0.1449047327041626, -0.10678882896900177, -0.20797929167747498, -0.08084581047296524, -0.004287463612854481, -0.025778336450457573, 0.11208857595920563, -0.1977800726890564, 0.049793656915426254, 0.07839557528495789, -0.03007419779896736, 0.10710182040929794, -0.26480334997177124, 
-0.13917659223079681, 0.04341813549399376, 0.04984372854232788, -0.19439230859279633, -0.1560213267803192, -0.09653355926275253, -0.023660903796553612, -0.12931503355503082, 0.118560791015625, -0.02014952525496483, 0.029083656147122383, -0.021535582840442657, 0.06604573130607605, 0.04016799107193947, -0.06827059388160706, 0.11993792653083801, -0.024362947791814804, 0.027191802859306335, -0.09626414626836777, -0.019226333126425743, -0.02931283973157406, -0.04699581116437912, 0.07304082810878754, 0.016697369515895844, 0.04323222115635872, -0.08501983433961868, -0.029045848175883293, -0.07031130790710449, 0.04349333420395851, -0.06459686160087585, -0.058767203241586685, -0.07327646017074585, 0.09100302308797836, 0.09395908564329147, -0.00782862026244402, 0.036284539848566055, -0.05168921500444412, 0.042412642389535904, 0.21852993965148926, 0.08935258537530899, 0.03755771741271019, -0.08803699910640717, -0.04494083672761917, -0.014482665807008743, -0.0015088676009327173, -0.0921461209654808, 0.04991646111011505, 0.08633695542812347, 0.03691338375210762, 0.10005567967891693, -0.021489817649126053, -0.1951061189174652, 0.0008976963581517339, 0.07795365154743195, -0.10087114572525024, -0.21391889452934265, 0.03586573153734207, 0.10620643943548203, -0.12040761858224869, -0.08043714612722397, 0.0891958475112915, 0.021724805235862732, -0.03165198862552643, -0.0015706225531175733, 0.0737648755311966, 0.04556688293814659, 0.07978817820549011, 0.003358245827257633, 0.043838825076818466, -0.06692615151405334, 0.09950072318315506, 0.14714325964450836, -0.11254584044218063, 0.005564239341765642, 0.06264206022024155, -0.047912612557411194, -0.06902488321065903, -0.008836696855723858, 0.04783284291625023, 0.006263612303882837, -0.03600558638572693, 0.0035267400089651346, -0.047315891832113266, 0.07075051963329315, 0.16522252559661865, -0.009676848538219929, 0.04968028888106346, 0.022775495424866676, -0.005146445240825415, -0.052709516137838364, 0.11193227767944336, 
0.02284017950296402, 0.04211563616991043, -0.01710350438952446, 0.034879863262176514, 0.01744123362004757, -0.019877271726727486, 0.018483232706785202, -0.05706320330500603, -0.06352575123310089, 0.007018042262643576, -0.19391882419586182, 0.05289114639163017, -0.07753574848175049, -0.004928058944642544, -0.0075585851445794106, -0.0010125855915248394, -0.0018145828507840633, 0.009898229502141476, -0.07246251404285431, -0.04192563518881798, -0.04439627006649971, 0.13374744355678558, -0.20011578500270844, -0.003851737128570676, 0.08150212466716766, -0.06809420138597488, 0.07066693902015686, -0.008449207991361618, -0.012111959978938103, 0.034870244562625885, -0.0702952891588211, -0.007826326414942741, -0.03651374578475952, 0.06540946662425995, 0.025542916730046272, -0.12538161873817444, -0.01847943291068077, -0.008613834157586098, -0.08191652595996857, -0.004419323522597551, 0.00846154149621725, -0.1375533789396286, 0.08482849597930908, 0.08694268018007278, -0.04579368606209755, -0.0428772009909153, 0.042864516377449036, 0.034029752016067505, 0.0065650842152535915, 0.08377602696418762, -0.02024582400918007, 0.02944597415626049, -0.1502395123243332, -0.03948710113763809, 0.009290069341659546, 0.003757817205041647, 0.04708275943994522, 0.00677367951720953, 0.022242816165089607, -0.004608322866261005, 0.2325543463230133, -0.012650285847485065, 0.012232448905706406, 0.0267924964427948, -0.017161471769213676, -0.042593371123075485, 0.03313933685421944, -0.013713742606341839, -0.00541891623288393, 0.02589459903538227, 0.009763633832335472, -0.03918937221169472, -0.05613142251968384, 0.007508398499339819, 0.09503830969333649, 0.1315087229013443, 0.20423603057861328, -0.03445708379149437, 0.05852967128157616, -0.1581616848707199, -0.051318924874067307, 0.002179455943405628, -0.04954472929239273, 0.055829402059316635, -0.0688764676451683, 0.05817946419119835, 0.10345587134361267, -0.11938728392124176, 0.1501019299030304, -0.04536747932434082, -0.024025917053222656, 
-0.0490434393286705, -0.17383527755737305, -0.04071984440088272, 0.03072378970682621, -0.0008767175604589283, -0.0895582064986229, 0.11763694137334824, 0.11784543842077255, 0.011284301057457924, -0.00037376375985331833, 0.07344109565019608, -0.07696044445037842, -0.05453018099069595, -0.02769930474460125, 0.010911478661000729, 0.023818757385015488, 0.010954798199236393, 0.06294796615839005, 0.002182452939450741, 0.04319661483168602, 0.06011543050408363, 0.09883888065814972, 0.037187360227108, 0.028886666521430016, -0.038368526846170425, -0.04500441998243332, 0.004643293563276529, -0.02922018989920616, -0.06262566894292831, 0.1943674236536026, 0.06496401876211166, 0.012139196507632732, 0.025547310709953308, 0.20764856040477753, -0.013127099722623825, -0.05714740231633186, -0.12260886281728745, 0.15580886602401733, 0.0020859427750110626, 0.0325503796339035, 0.03755055367946625, -0.10898388177156448, 0.006478193216025829, 0.15858183801174164, 0.10982296615839005, 0.019974542781710625, 0.012366083450615406, 0.04343477264046669, 0.024517089128494263, -0.032468028366565704, 0.051077891141176224, 0.03797030821442604, 0.23999559879302979, -0.05515106022357941, 0.08615308254957199, -0.019323548302054405, 0.005376506596803665, -0.045702703297138214, 0.11830168962478638, -0.05140344426035881, 0.021550487726926804, -0.056654900312423706, 0.07245620340108871, -0.06858200579881668, -0.24310724437236786, -0.025139080360531807, -0.07284682244062424, -0.13973356783390045, -0.009302760474383831, 0.018347784876823425, -0.02237418293952942, 0.04473721608519554, 0.03873191028833389, -0.029539205133914948, 0.19071020185947418, 0.007986895740032196, -0.06956169754266739, -0.08902868628501892, 0.06540989875793457, -0.05767061933875084, 0.2827502489089966, 0.0013074865564703941, 0.038077857345342636, 0.08568187057971954, -0.021004606038331985, -0.11879867315292358, 0.03484180569648743, 0.0907348245382309, -0.056121960282325745, 0.04617100954055786, 0.15339772403240204, 
-0.022573214024305344, 0.14667409658432007, 0.03156554698944092, 0.00562679348513484, 0.08010955899953842, 0.06791026890277863, 0.025251934304833412, -0.07616034895181656, 0.08093049377202988, -0.09442663192749023, 0.12331592291593552, 0.10919591784477234, -0.011545815505087376, 0.011251791380345821, -0.05460033565759659, 0.05239371210336685, -0.0471910685300827, 0.131797194480896, -0.016268175095319748, -0.1365521103143692, 0.04710635170340538, 0.003933606203645468, 0.0725187212228775, -0.23481488227844238, -0.05505480244755745, 0.09799134731292725, -0.0509345680475235, -0.005878755357116461, 0.08846944570541382, 0.04206952825188637, 0.01874636486172676, -0.05078864097595215, -0.12850093841552734, 0.014520694501698017, 0.10743078589439392, -0.06946776062250137, -0.035039402544498444 ]
3b0670138744607fde0d68d2221b58076c57b833
# Dataset Card for Evaluation run of kaitchup/Maixtchup-4x7b-QLoRA-SFT-UltraChat <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [kaitchup/Maixtchup-4x7b-QLoRA-SFT-UltraChat](https://huggingface.co/kaitchup/Maixtchup-4x7b-QLoRA-SFT-UltraChat) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_kaitchup__Maixtchup-4x7b-QLoRA-SFT-UltraChat", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T06:00:56.791934](https://huggingface.co/datasets/open-llm-leaderboard/details_kaitchup__Maixtchup-4x7b-QLoRA-SFT-UltraChat/blob/main/results_2024-01-25T06-00-56.791934.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6074507414002527, "acc_stderr": 0.033077121699046634, "acc_norm": 0.6115958077535273, "acc_norm_stderr": 0.0337456609814203, "mc1": 0.3733170134638923, "mc1_stderr": 0.01693237055757063, "mc2": 0.5333005258566799, "mc2_stderr": 0.015528721157331138 }, "harness|arc:challenge|25": { "acc": 0.5733788395904437, "acc_stderr": 0.014453185592920293, "acc_norm": 0.6092150170648464, "acc_norm_stderr": 0.014258563880513782 }, "harness|hellaswag|10": { "acc": 0.6344353714399522, "acc_stderr": 0.004806039039008955, "acc_norm": 0.8323043218482374, "acc_norm_stderr": 0.0037283229688748988 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.046482319871173156, "acc_norm": 0.31, "acc_norm_stderr": 0.046482319871173156 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5777777777777777, "acc_stderr": 0.04266763404099582, "acc_norm": 0.5777777777777777, "acc_norm_stderr": 0.04266763404099582 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6578947368421053, "acc_stderr": 0.03860731599316092, "acc_norm": 0.6578947368421053, "acc_norm_stderr": 0.03860731599316092 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6679245283018868, "acc_stderr": 0.02898545565233439, "acc_norm": 0.6679245283018868, "acc_norm_stderr": 0.02898545565233439 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7361111111111112, "acc_stderr": 0.03685651095897532, "acc_norm": 0.7361111111111112, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 
0.049999999999999996 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6242774566473989, "acc_stderr": 0.036928207672648664, "acc_norm": 0.6242774566473989, "acc_norm_stderr": 0.036928207672648664 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.04810840148082636, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.04810840148082636 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.04292346959909283, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5276595744680851, "acc_stderr": 0.03263597118409769, "acc_norm": 0.5276595744680851, "acc_norm_stderr": 0.03263597118409769 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4298245614035088, "acc_stderr": 0.04657047260594964, "acc_norm": 0.4298245614035088, "acc_norm_stderr": 0.04657047260594964 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5517241379310345, "acc_stderr": 0.04144311810878151, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.04144311810878151 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3862433862433862, "acc_stderr": 0.025075981767601688, "acc_norm": 0.3862433862433862, "acc_norm_stderr": 0.025075981767601688 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.38095238095238093, "acc_stderr": 0.04343525428949098, "acc_norm": 0.38095238095238093, "acc_norm_stderr": 0.04343525428949098 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7096774193548387, "acc_stderr": 0.02582210611941589, "acc_norm": 0.7096774193548387, "acc_norm_stderr": 0.02582210611941589 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5172413793103449, "acc_stderr": 0.035158955511656986, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.035158955511656986 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.67, "acc_stderr": 0.04725815626252607, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252607 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7212121212121212, "acc_stderr": 0.03501438706296781, "acc_norm": 0.7212121212121212, "acc_norm_stderr": 0.03501438706296781 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7878787878787878, "acc_stderr": 0.0291265228345868, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.0291265228345868 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8341968911917098, "acc_stderr": 0.026839845022314415, "acc_norm": 0.8341968911917098, "acc_norm_stderr": 0.026839845022314415 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6, "acc_stderr": 0.024838811988033165, "acc_norm": 0.6, "acc_norm_stderr": 0.024838811988033165 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3111111111111111, "acc_stderr": 0.028226446749683515, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.028226446749683515 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.634453781512605, "acc_stderr": 0.03128217706368461, "acc_norm": 0.634453781512605, "acc_norm_stderr": 0.03128217706368461 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.32450331125827814, "acc_stderr": 0.038227469376587525, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.038227469376587525 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8165137614678899, "acc_stderr": 0.016595259710399303, "acc_norm": 0.8165137614678899, "acc_norm_stderr": 0.016595259710399303 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.49074074074074076, "acc_stderr": 0.034093869469927006, "acc_norm": 
0.49074074074074076, "acc_norm_stderr": 0.034093869469927006 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.75, "acc_stderr": 0.03039153369274154, "acc_norm": 0.75, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7721518987341772, "acc_stderr": 0.027303484599069422, "acc_norm": 0.7721518987341772, "acc_norm_stderr": 0.027303484599069422 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6681614349775785, "acc_stderr": 0.031602951437766785, "acc_norm": 0.6681614349775785, "acc_norm_stderr": 0.031602951437766785 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7251908396946565, "acc_stderr": 0.03915345408847836, "acc_norm": 0.7251908396946565, "acc_norm_stderr": 0.03915345408847836 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7603305785123967, "acc_stderr": 0.03896878985070416, "acc_norm": 0.7603305785123967, "acc_norm_stderr": 0.03896878985070416 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7222222222222222, "acc_stderr": 0.04330043749650742, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.04330043749650742 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6993865030674846, "acc_stderr": 0.03602511318806771, "acc_norm": 0.6993865030674846, "acc_norm_stderr": 0.03602511318806771 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.49107142857142855, "acc_stderr": 0.04745033255489123, "acc_norm": 0.49107142857142855, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.7378640776699029, "acc_stderr": 0.04354631077260595, "acc_norm": 0.7378640776699029, "acc_norm_stderr": 0.04354631077260595 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8589743589743589, "acc_stderr": 0.022801382534597556, "acc_norm": 0.8589743589743589, "acc_norm_stderr": 0.022801382534597556 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 
0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7956577266922095, "acc_stderr": 0.014419123980931894, "acc_norm": 0.7956577266922095, "acc_norm_stderr": 0.014419123980931894 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6820809248554913, "acc_stderr": 0.025070713719153176, "acc_norm": 0.6820809248554913, "acc_norm_stderr": 0.025070713719153176 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2681564245810056, "acc_stderr": 0.014816119635317008, "acc_norm": 0.2681564245810056, "acc_norm_stderr": 0.014816119635317008 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6699346405228758, "acc_stderr": 0.0269256546536157, "acc_norm": 0.6699346405228758, "acc_norm_stderr": 0.0269256546536157 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6913183279742765, "acc_stderr": 0.026236965881153266, "acc_norm": 0.6913183279742765, "acc_norm_stderr": 0.026236965881153266 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6759259259259259, "acc_stderr": 0.026041766202717156, "acc_norm": 0.6759259259259259, "acc_norm_stderr": 0.026041766202717156 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4787234042553192, "acc_stderr": 0.029800481645628693, "acc_norm": 0.4787234042553192, "acc_norm_stderr": 0.029800481645628693 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.43089960886571055, "acc_stderr": 0.01264769588954723, "acc_norm": 0.43089960886571055, "acc_norm_stderr": 0.01264769588954723 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.625, "acc_stderr": 0.029408372932278746, "acc_norm": 0.625, "acc_norm_stderr": 0.029408372932278746 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6535947712418301, "acc_stderr": 0.01924978569171721, "acc_norm": 0.6535947712418301, "acc_norm_stderr": 0.01924978569171721 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.04461272175910509, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 
0.04461272175910509 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7061224489795919, "acc_stderr": 0.02916273841024977, "acc_norm": 0.7061224489795919, "acc_norm_stderr": 0.02916273841024977 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7114427860696517, "acc_stderr": 0.03203841040213322, "acc_norm": 0.7114427860696517, "acc_norm_stderr": 0.03203841040213322 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.79, "acc_stderr": 0.040936018074033256, "acc_norm": 0.79, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-virology|5": { "acc": 0.46987951807228917, "acc_stderr": 0.03885425420866766, "acc_norm": 0.46987951807228917, "acc_norm_stderr": 0.03885425420866766 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.3733170134638923, "mc1_stderr": 0.01693237055757063, "mc2": 0.5333005258566799, "mc2_stderr": 0.015528721157331138 }, "harness|winogrande|5": { "acc": 0.7719021310181531, "acc_stderr": 0.0117930158176636 }, "harness|gsm8k|5": { "acc": 0.43214556482183475, "acc_stderr": 0.013645072137842443 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_kaitchup__Maixtchup-4x7b-QLoRA-SFT-UltraChat
[ "region:us" ]
2024-01-25T06:03:14+00:00
{"pretty_name": "Evaluation run of kaitchup/Maixtchup-4x7b-QLoRA-SFT-UltraChat", "dataset_summary": "Dataset automatically created during the evaluation run of model [kaitchup/Maixtchup-4x7b-QLoRA-SFT-UltraChat](https://huggingface.co/kaitchup/Maixtchup-4x7b-QLoRA-SFT-UltraChat) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_kaitchup__Maixtchup-4x7b-QLoRA-SFT-UltraChat\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T06:00:56.791934](https://huggingface.co/datasets/open-llm-leaderboard/details_kaitchup__Maixtchup-4x7b-QLoRA-SFT-UltraChat/blob/main/results_2024-01-25T06-00-56.791934.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6074507414002527,\n \"acc_stderr\": 0.033077121699046634,\n \"acc_norm\": 0.6115958077535273,\n \"acc_norm_stderr\": 0.0337456609814203,\n \"mc1\": 0.3733170134638923,\n \"mc1_stderr\": 0.01693237055757063,\n \"mc2\": 0.5333005258566799,\n \"mc2_stderr\": 0.015528721157331138\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5733788395904437,\n \"acc_stderr\": 0.014453185592920293,\n \"acc_norm\": 0.6092150170648464,\n \"acc_norm_stderr\": 0.014258563880513782\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6344353714399522,\n \"acc_stderr\": 0.004806039039008955,\n \"acc_norm\": 0.8323043218482374,\n \"acc_norm_stderr\": 0.0037283229688748988\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.046482319871173156,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.046482319871173156\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5777777777777777,\n \"acc_stderr\": 0.04266763404099582,\n \"acc_norm\": 0.5777777777777777,\n \"acc_norm_stderr\": 0.04266763404099582\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6578947368421053,\n \"acc_stderr\": 0.03860731599316092,\n \"acc_norm\": 0.6578947368421053,\n \"acc_norm_stderr\": 0.03860731599316092\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6679245283018868,\n \"acc_stderr\": 0.02898545565233439,\n \"acc_norm\": 0.6679245283018868,\n \"acc_norm_stderr\": 0.02898545565233439\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7361111111111112,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.7361111111111112,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.5,\n 
\"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6242774566473989,\n \"acc_stderr\": 0.036928207672648664,\n \"acc_norm\": 0.6242774566473989,\n \"acc_norm_stderr\": 0.036928207672648664\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.04810840148082636,\n \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.04810840148082636\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5276595744680851,\n \"acc_stderr\": 0.03263597118409769,\n \"acc_norm\": 0.5276595744680851,\n \"acc_norm_stderr\": 0.03263597118409769\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4298245614035088,\n \"acc_stderr\": 0.04657047260594964,\n \"acc_norm\": 0.4298245614035088,\n \"acc_norm_stderr\": 0.04657047260594964\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878151,\n \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878151\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3862433862433862,\n \"acc_stderr\": 0.025075981767601688,\n \"acc_norm\": 0.3862433862433862,\n \"acc_norm_stderr\": 0.025075981767601688\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.38095238095238093,\n \"acc_stderr\": 
0.04343525428949098,\n \"acc_norm\": 0.38095238095238093,\n \"acc_norm_stderr\": 0.04343525428949098\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7096774193548387,\n \"acc_stderr\": 0.02582210611941589,\n \"acc_norm\": 0.7096774193548387,\n \"acc_norm_stderr\": 0.02582210611941589\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.035158955511656986,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.035158955511656986\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252607,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252607\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7212121212121212,\n \"acc_stderr\": 0.03501438706296781,\n \"acc_norm\": 0.7212121212121212,\n \"acc_norm_stderr\": 0.03501438706296781\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.0291265228345868,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.0291265228345868\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8341968911917098,\n \"acc_stderr\": 0.026839845022314415,\n \"acc_norm\": 0.8341968911917098,\n \"acc_norm_stderr\": 0.026839845022314415\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.024838811988033165,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.024838811988033165\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3111111111111111,\n \"acc_stderr\": 0.028226446749683515,\n \"acc_norm\": 0.3111111111111111,\n \"acc_norm_stderr\": 0.028226446749683515\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n 
\"acc\": 0.634453781512605,\n \"acc_stderr\": 0.03128217706368461,\n \"acc_norm\": 0.634453781512605,\n \"acc_norm_stderr\": 0.03128217706368461\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.32450331125827814,\n \"acc_stderr\": 0.038227469376587525,\n \"acc_norm\": 0.32450331125827814,\n \"acc_norm_stderr\": 0.038227469376587525\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8165137614678899,\n \"acc_stderr\": 0.016595259710399303,\n \"acc_norm\": 0.8165137614678899,\n \"acc_norm_stderr\": 0.016595259710399303\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.49074074074074076,\n \"acc_stderr\": 0.034093869469927006,\n \"acc_norm\": 0.49074074074074076,\n \"acc_norm_stderr\": 0.034093869469927006\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7721518987341772,\n \"acc_stderr\": 0.027303484599069422,\n \"acc_norm\": 0.7721518987341772,\n \"acc_norm_stderr\": 0.027303484599069422\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6681614349775785,\n \"acc_stderr\": 0.031602951437766785,\n \"acc_norm\": 0.6681614349775785,\n \"acc_norm_stderr\": 0.031602951437766785\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7251908396946565,\n \"acc_stderr\": 0.03915345408847836,\n \"acc_norm\": 0.7251908396946565,\n \"acc_norm_stderr\": 0.03915345408847836\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7603305785123967,\n \"acc_stderr\": 0.03896878985070416,\n \"acc_norm\": 0.7603305785123967,\n \"acc_norm_stderr\": 0.03896878985070416\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.04330043749650742,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.04330043749650742\n },\n 
\"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6993865030674846,\n \"acc_stderr\": 0.03602511318806771,\n \"acc_norm\": 0.6993865030674846,\n \"acc_norm_stderr\": 0.03602511318806771\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.49107142857142855,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.49107142857142855,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7378640776699029,\n \"acc_stderr\": 0.04354631077260595,\n \"acc_norm\": 0.7378640776699029,\n \"acc_norm_stderr\": 0.04354631077260595\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n \"acc_stderr\": 0.022801382534597556,\n \"acc_norm\": 0.8589743589743589,\n \"acc_norm_stderr\": 0.022801382534597556\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7956577266922095,\n \"acc_stderr\": 0.014419123980931894,\n \"acc_norm\": 0.7956577266922095,\n \"acc_norm_stderr\": 0.014419123980931894\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6820809248554913,\n \"acc_stderr\": 0.025070713719153176,\n \"acc_norm\": 0.6820809248554913,\n \"acc_norm_stderr\": 0.025070713719153176\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2681564245810056,\n \"acc_stderr\": 0.014816119635317008,\n \"acc_norm\": 0.2681564245810056,\n \"acc_norm_stderr\": 0.014816119635317008\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6699346405228758,\n \"acc_stderr\": 0.0269256546536157,\n \"acc_norm\": 0.6699346405228758,\n \"acc_norm_stderr\": 0.0269256546536157\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6913183279742765,\n \"acc_stderr\": 0.026236965881153266,\n \"acc_norm\": 0.6913183279742765,\n \"acc_norm_stderr\": 0.026236965881153266\n },\n 
\"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6759259259259259,\n \"acc_stderr\": 0.026041766202717156,\n \"acc_norm\": 0.6759259259259259,\n \"acc_norm_stderr\": 0.026041766202717156\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4787234042553192,\n \"acc_stderr\": 0.029800481645628693,\n \"acc_norm\": 0.4787234042553192,\n \"acc_norm_stderr\": 0.029800481645628693\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.43089960886571055,\n \"acc_stderr\": 0.01264769588954723,\n \"acc_norm\": 0.43089960886571055,\n \"acc_norm_stderr\": 0.01264769588954723\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.625,\n \"acc_stderr\": 0.029408372932278746,\n \"acc_norm\": 0.625,\n \"acc_norm_stderr\": 0.029408372932278746\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6535947712418301,\n \"acc_stderr\": 0.01924978569171721,\n \"acc_norm\": 0.6535947712418301,\n \"acc_norm_stderr\": 0.01924978569171721\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.04461272175910509,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.04461272175910509\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7061224489795919,\n \"acc_stderr\": 0.02916273841024977,\n \"acc_norm\": 0.7061224489795919,\n \"acc_norm_stderr\": 0.02916273841024977\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7114427860696517,\n \"acc_stderr\": 0.03203841040213322,\n \"acc_norm\": 0.7114427860696517,\n \"acc_norm_stderr\": 0.03203841040213322\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.46987951807228917,\n \"acc_stderr\": 0.03885425420866766,\n \"acc_norm\": 0.46987951807228917,\n \"acc_norm_stderr\": 0.03885425420866766\n },\n 
\"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3733170134638923,\n \"mc1_stderr\": 0.01693237055757063,\n \"mc2\": 0.5333005258566799,\n \"mc2_stderr\": 0.015528721157331138\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7719021310181531,\n \"acc_stderr\": 0.0117930158176636\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.43214556482183475,\n \"acc_stderr\": 0.013645072137842443\n }\n}\n```", "repo_url": "https://huggingface.co/kaitchup/Maixtchup-4x7b-QLoRA-SFT-UltraChat", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|arc:challenge|25_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|gsm8k|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hellaswag|10_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T06-00-56.791934.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T06-00-56.791934.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T06-00-56.791934.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T06-00-56.791934.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T06-00-56.791934.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T06-00-56.791934.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T06-00-56.791934.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T06-00-56.791934.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["**/details_harness|winogrande|5_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T06-00-56.791934.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T06_00_56.791934", "path": ["results_2024-01-25T06-00-56.791934.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T06-00-56.791934.parquet"]}]}]}
2024-01-25T06:03:38+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of kaitchup/Maixtchup-4x7b-QLoRA-SFT-UltraChat Dataset automatically created during the evaluation run of model kaitchup/Maixtchup-4x7b-QLoRA-SFT-UltraChat on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T06:00:56.791934(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of kaitchup/Maixtchup-4x7b-QLoRA-SFT-UltraChat\n\n\n\nDataset automatically created during the evaluation run of model kaitchup/Maixtchup-4x7b-QLoRA-SFT-UltraChat on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T06:00:56.791934(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of kaitchup/Maixtchup-4x7b-QLoRA-SFT-UltraChat\n\n\n\nDataset automatically created during the evaluation run of model kaitchup/Maixtchup-4x7b-QLoRA-SFT-UltraChat on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T06:00:56.791934(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 207, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of kaitchup/Maixtchup-4x7b-QLoRA-SFT-UltraChat\n\n\n\nDataset automatically created during the evaluation run of model kaitchup/Maixtchup-4x7b-QLoRA-SFT-UltraChat on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T06:00:56.791934(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]" ]
[ -0.09579823166131973, 0.16401518881320953, -0.004962581675499678, 0.03237170726060867, 0.06892703473567963, -0.03604154288768768, -0.008547302335500717, 0.13390590250492096, 0.02523072436451912, 0.17936858534812927, -0.012984898872673512, 0.09682302176952362, 0.08903703093528748, 0.22589179873466492, 0.013314777985215187, -0.13576817512512207, 0.027850475162267685, -0.08254553377628326, 0.0743044912815094, 0.08045255392789841, 0.08355244994163513, -0.11024590581655502, 0.07450266927480698, -0.008517514914274216, -0.03960493952035904, -0.004481561481952667, -0.07156770676374435, -0.04224414378404617, 0.08400441706180573, 0.06648917496204376, 0.009050922468304634, -0.02325199730694294, -0.0015657749027013779, -0.22814469039440155, 0.02661634236574173, 0.08272263407707214, 0.03477806970477104, 0.08502990007400513, 0.13628894090652466, -0.10036060959100723, 0.08434862643480301, -0.07224123924970627, 0.036487799137830734, 0.05033865571022034, -0.11400984972715378, -0.08399492502212524, -0.17432236671447754, 0.011420819908380508, 0.09049589186906815, 0.0591411292552948, -0.026088980957865715, 0.1210906058549881, 0.005686806980520487, 0.03745080903172493, 0.16957591474056244, -0.1270325928926468, -0.02908598631620407, -0.031433526426553726, 0.044797737151384354, 0.04282957687973976, -0.11883406341075897, -0.02332831360399723, 0.033763330429792404, 0.03771168738603592, 0.005828789435327053, 0.016807839274406433, -0.014361518435180187, 0.018394524231553078, -0.13788758218288422, -0.06517677754163742, 0.10874203592538834, 0.005582417361438274, -0.06758784502744675, -0.13315697014331818, -0.03255568444728851, -0.0840098187327385, -0.01359676755964756, -0.01117923017591238, 0.013237910345196724, -0.024753205478191376, 0.05141409859061241, -0.023286787793040276, -0.09245902299880981, -0.01873745024204254, -0.046162184327840805, 0.04954869672656059, 0.01391507126390934, 0.00942448154091835, -0.0007912336732260883, 0.11289520561695099, 0.04239501804113388, -0.09104084968566895, 
-0.12838751077651978, -0.0541108176112175, -0.10523664206266403, -0.045830607414245605, 0.01157604344189167, -0.061553601175546646, 0.03637484461069107, 0.21218575537204742, -0.08188217133283615, 0.015175572596490383, -0.08632367849349976, -0.0030511377844959497, 0.11341359466314316, 0.08631346374750137, -0.006128962151706219, -0.070591039955616, -0.006322725210338831, 0.02987167052924633, 0.02324785105884075, -0.011317272670567036, 0.04602612927556038, 0.05796841159462929, 0.03455697372555733, 0.12500730156898499, 0.11934641003608704, 0.002724054967984557, -0.057363905012607574, -0.029944945126771927, 0.17249761521816254, -0.1844782680273056, -0.0018257583724334836, 0.007443087175488472, -0.0648033395409584, -0.09107033908367157, 0.05562201887369156, -0.013018341735005379, -0.05846330523490906, 0.07977709919214249, -0.0751076266169548, -0.04826105013489723, -0.08493759483098984, -0.04316459223628044, 0.053661007434129715, -0.02091214805841446, -0.061977554112672806, -0.05446697026491165, -0.13284221291542053, -0.09471390396356583, -0.012323110364377499, -0.08597800880670547, 0.002694485941901803, 0.026097668334841728, -0.00787674356251955, -0.018595479428768158, -0.021611493080854416, 0.1311451941728592, -0.0768004059791565, 0.026274165138602257, -0.013704049400985241, 0.004911168944090605, 0.09505429863929749, 0.04811966046690941, -0.14208316802978516, 0.07562904804944992, -0.09636816382408142, 0.12411458790302277, -0.09239380061626434, 0.007364845369011164, -0.13979795575141907, -0.0051995133981108665, -0.05414964258670807, 0.014651183038949966, 0.01487389300018549, 0.12478438764810562, -0.23338282108306885, -0.009090096689760685, 0.1372268795967102, -0.11130563169717789, -0.10162344574928284, 0.02758762426674366, -0.028697453439235687, 0.06200460344552994, 0.06902315467596054, 0.07584860920906067, 0.09730209410190582, -0.05565441772341728, -0.14361374080181122, -0.10851363092660904, -0.010402962565422058, 0.11145110428333282, 0.04939907416701317, 
-0.0698007270693779, 0.13940466940402985, 0.030306551605463028, -0.0040338048711419106, -0.044222813099622726, -0.027245327830314636, -0.04394644871354103, -0.02182680554687977, -0.05487844720482826, -0.10116168111562729, -0.00969198253005743, -0.05669257044792175, -0.03445429354906082, -0.07235626131296158, 0.04752248525619507, 0.09794007241725922, 0.014994590543210506, 0.015275176614522934, -0.0947621539235115, 0.06489651650190353, 0.005468341056257486, 0.0053479294292628765, -0.22158867120742798, -0.10696105659008026, 0.039717432111501694, -0.13162674009799957, 0.020408419892191887, -0.025827491655945778, 0.014688174240291119, 0.04482186585664749, 0.012450282461941242, 0.010575748980045319, 0.024197319522500038, -0.005774157587438822, -0.030341600999236107, -0.13308018445968628, -0.038590528070926666, -0.07649270445108414, 0.05538872629404068, -0.1329747885465622, -0.03233514726161957, 0.10687348246574402, 0.17824645340442657, 0.029774261638522148, -0.10627499967813492, 0.07640467584133148, -0.009091576561331749, -0.049233995378017426, -0.07214230298995972, -0.00826216023415327, -0.01840771734714508, 0.06309550255537033, 0.06557553261518478, -0.16719229519367218, -0.12044253945350647, 0.07585875689983368, 0.16669365763664246, -0.05965227261185646, -0.07451535016298294, -0.11143597960472107, -0.03668844327330589, -0.09739582240581512, -0.04552074521780014, 0.0479869544506073, 0.0765651986002922, 0.02772350423038006, -0.05717960372567177, -0.09769122302532196, -0.012463848106563091, 0.07828190922737122, -0.06870345026254654, 0.10274363309144974, 0.03206408768892288, -0.1368730068206787, 0.09731675684452057, 0.013648665510118008, 0.15503817796707153, 0.10438095033168793, -0.0056844656355679035, -0.1332951784133911, 0.0008440009551122785, 0.051312465220689774, 0.03881672024726868, 0.07738491147756577, 0.0110810287296772, 0.026932736858725548, 0.07454933226108551, -0.009739605709910393, 0.04681159555912018, -0.04889729246497154, 0.035209137946367264, 
0.016953179612755775, -0.00785962212830782, 0.03672518953680992, 0.030017754063010216, 0.026125753298401833, 0.07676222175359726, 0.03142744302749634, 0.12256482243537903, -0.03401212766766548, -0.044575851410627365, -0.09604358673095703, 0.14793381094932556, -0.09222139418125153, -0.22267325222492218, -0.14273953437805176, -0.05229518562555313, -0.009679250419139862, -0.010482553392648697, 0.04938528314232826, -0.009765109047293663, -0.09607715904712677, -0.1248883306980133, 0.05510100722312927, 0.06018628180027008, -0.0856737494468689, -0.02546432428061962, 0.01964728534221649, 0.024098927155137062, -0.16289269924163818, 0.036943525075912476, 0.02873116172850132, -0.037057507783174515, -0.02392699010670185, 0.09843219071626663, 0.12297791242599487, 0.05653928965330124, 0.0472378171980381, -0.0168915968388319, -0.0063627054914832115, 0.21685516834259033, -0.09728027135133743, 0.0411294586956501, 0.1225443109869957, -0.04200609773397446, 0.057257503271102905, 0.11780677735805511, -0.008231406100094318, -0.08429453521966934, 0.04255412891507149, 0.08850377053022385, -0.05082402750849724, -0.2757611572742462, -0.06079331785440445, -0.03060426004230976, -0.004468352533876896, 0.0826263576745987, 0.09336219727993011, -0.017021050676703453, 0.012249782681465149, -0.12226814776659012, -0.08411570638418198, -0.05496230348944664, 0.053708553314208984, 0.03573211282491684, 0.0148347532376647, 0.0529337041079998, -0.05171654745936394, 0.05818435177206993, 0.12547285854816437, 0.01368529163300991, 0.1595352590084076, -0.06139962747693062, 0.15030443668365479, 0.09196742624044418, 0.09543625265359879, -0.017679547891020775, 0.06966123729944229, -0.004844766575843096, 0.06270744651556015, 0.012909396551549435, -0.08767110109329224, -0.04195598140358925, 0.0810333788394928, -0.014688471332192421, -0.004166890867054462, 0.04629186913371086, -0.039015207439661026, 0.07144865393638611, 0.21047179400920868, 0.015015375800430775, -0.16788306832313538, -0.10158591717481613, 
0.05028451234102249, -0.03941112011671066, -0.08579182624816895, -0.023122746497392654, 0.06316885352134705, -0.13229626417160034, 0.046913981437683105, -0.03943423181772232, 0.07747508585453033, -0.09731792658567429, -0.0236249677836895, 0.02079932764172554, 0.07961307466030121, -0.02939421311020851, 0.08119052648544312, -0.11902248114347458, 0.09362678974866867, -0.0018979987362399697, 0.059860147535800934, -0.059828054159879684, 0.055798545479774475, 0.008981454186141491, -0.07353106886148453, 0.13236580789089203, 0.007018097676336765, -0.1526397466659546, -0.057323966175317764, -0.127266526222229, 0.002009425777941942, 0.051400233060121536, -0.11849203705787659, 0.11230695992708206, 0.01025210227817297, -0.023565810173749924, -0.03967120125889778, -0.049573931843042374, -0.10645199567079544, -0.1964007467031479, 0.10252204537391663, -0.12359578162431717, 0.08918692916631699, -0.06707679480314255, -0.04928913712501526, -0.008656720630824566, 0.16792328655719757, -0.11084742099046707, -0.07166151702404022, -0.14623647928237915, 0.06870321184396744, 0.17770998179912567, -0.07265398651361465, 0.07577427476644516, -0.03229030221700668, 0.2100285291671753, 0.021439170464873314, -0.06654700636863708, 0.007489650044590235, -0.08408705145120621, -0.186808779835701, -0.030139219015836716, 0.14406011998653412, 0.07229384779930115, -0.0060936082154512405, 0.003773496486246586, 0.07496970891952515, 0.00012718109064735472, -0.08035221695899963, 0.0736275240778923, 0.08159299194812775, 0.09072684496641159, 0.0227980799973011, -0.047303859144449234, -0.11767209321260452, -0.11878971755504608, -0.09913527965545654, 0.07141415774822235, 0.1344507336616516, -0.042496003210544586, 0.13635416328907013, 0.07718909531831741, -0.09542588144540787, -0.17692914605140686, -0.02002076804637909, 0.025157634168863297, -0.008582559414207935, 0.11333366483449936, -0.1774989515542984, 0.08633071929216385, 0.09144897758960724, -0.026424597948789597, 0.16899575293064117, -0.23172087967395782, 
-0.13674309849739075, 0.02212548442184925, 0.027963634580373764, -0.1877918392419815, -0.14172203838825226, -0.12442494928836823, -0.01124854851514101, -0.10768693685531616, 0.12052473425865173, 0.00796540081501007, 0.026774272322654724, -0.021989675238728523, 0.05288754403591156, 0.03968397527933121, -0.04806779697537422, 0.11831865459680557, -0.013197788037359715, 0.019193898886442184, -0.0870642364025116, -0.01084254402667284, -0.033968180418014526, -0.06404374539852142, 0.05953593552112579, 0.04444795846939087, 0.07401081919670105, -0.09267333894968033, -0.03261631727218628, -0.04966218024492264, 0.07162424176931381, -0.04890556260943413, -0.03696020320057869, -0.06348659098148346, 0.08277858048677444, 0.0595380999147892, -0.01777118444442749, 0.03409286215901375, -0.03653021901845932, 0.07547535747289658, 0.20360347628593445, 0.046694085001945496, 0.0526464581489563, -0.08512893319129944, -0.02119670808315277, 0.0009162441128864884, -0.02350759319961071, -0.10285943001508713, 0.03827254846692085, 0.09877387434244156, 0.04814505577087402, 0.06697504222393036, -0.02042921632528305, -0.1955357939004898, 0.02223214879631996, 0.09719743579626083, -0.11188191175460815, -0.1786910742521286, 0.02672986313700676, 0.11532159149646759, -0.12466094642877579, -0.03825478255748749, 0.09996107965707779, 0.02954580821096897, -0.039363544434309006, 0.001441278145648539, 0.0830523669719696, 0.055822379887104034, 0.11489475518465042, -0.017327453941106796, 0.043176375329494476, -0.07978557795286179, 0.11047539114952087, 0.1131696105003357, -0.0784219279885292, 0.006471924018114805, 0.11271602660417557, -0.05532125383615494, -0.04002014175057411, -0.007532934192568064, 0.017026152461767197, -0.024638373404741287, -0.023168722167611122, 0.0059087080880999565, -0.054087553173303604, 0.08332114666700363, 0.19758863747119904, -0.019506514072418213, 0.054546866565942764, 0.03769924119114876, 0.0015494122635573149, -0.031007355079054832, 0.12567739188671112, 0.0464879609644413, 
0.044378072023391724, -0.025475680828094482, 0.031585242599248886, 0.008432559669017792, -0.005207908805459738, 0.014383832924067974, -0.043048493564128876, -0.020705100148916245, -0.023185955360531807, -0.17437009513378143, 0.013653299771249294, -0.0812051072716713, -0.04016268253326416, -0.026356447488069534, -0.053049031645059586, -0.03364171087741852, 0.034050147980451584, -0.06413407623767853, -0.07045049220323563, -0.07042962312698364, 0.10068951547145844, -0.21517635881900787, 0.01518569141626358, 0.08976928144693375, -0.07092215865850449, 0.08578131347894669, 0.042551375925540924, -0.0010214220965281129, 0.014139906503260136, -0.09329354763031006, -0.03953922167420387, -0.017131730914115906, 0.027874527499079704, 0.04508885741233826, -0.15678130090236664, -0.0034595381002873182, 0.015898006036877632, -0.07491859793663025, -0.024341940879821777, 0.050970111042261124, -0.14695538580417633, -0.00018525343330111355, 0.0640987902879715, -0.007657916285097599, -0.0287407748401165, 0.02117634005844593, 0.07666744291782379, 0.006927647162228823, 0.09517811238765717, -0.01113617978990078, 0.0381811186671257, -0.18044282495975494, -0.027829870581626892, -0.0069251274690032005, -0.017072154209017754, -0.002101338468492031, 0.04082384333014488, 0.04741504043340683, -0.014603323303163052, 0.18342794477939606, -0.0405704565346241, 0.0215670894831419, 0.0710923969745636, -0.03454187139868736, -0.08695088326931, 0.04368685185909271, 0.061652980744838715, 0.023612696677446365, 0.02310619130730629, 0.03486824408173561, -0.048326607793569565, -0.03372599557042122, -0.04174463078379631, 0.11176781356334686, 0.16464278101921082, 0.16768424212932587, -0.003387856064364314, 0.05609060078859329, -0.15729694068431854, -0.04632730782032013, -0.0040849982760846615, -0.07144110649824142, 0.03528512269258499, -0.05455491319298744, 0.08876020461320877, 0.08392779529094696, -0.13046008348464966, 0.09107256680727005, -0.07449673116207123, -0.028467411175370216, -0.036108192056417465, 
-0.12809470295906067, -0.039081599563360214, 0.008989720605313778, 0.001378092565573752, -0.09369407594203949, 0.12328772246837616, 0.09996934980154037, 0.004739297553896904, -0.009488973766565323, 0.12229620665311813, -0.09936507791280746, -0.07273625582456589, 0.0002869191230274737, 0.021803002804517746, 0.02270430326461792, 0.03196847811341286, 0.05101870745420456, 0.019773347303271294, 0.08328134566545486, 0.08226387947797775, 0.07559488713741302, 0.04791727662086487, 0.04333442822098732, -0.02643573097884655, -0.06601934880018234, 0.015679344534873962, -0.010402150452136993, -0.04082608222961426, 0.15315178036689758, 0.05227823555469513, 0.045501958578825, 0.013143882155418396, 0.24744369089603424, 0.0005030584870837629, -0.02053319476544857, -0.11834757775068283, 0.09273672848939896, 0.030133787542581558, 0.00571861956268549, 0.026168495416641235, -0.14893794059753418, 0.02189161628484726, 0.16892048716545105, 0.06799629330635071, 0.02971772849559784, -0.0021199139300733805, 0.018495263531804085, 0.025225253775715828, -0.01589224487543106, 0.016913043335080147, 0.06706589460372925, 0.17492395639419556, -0.04007117450237274, 0.06274598836898804, -0.00841812789440155, -0.03382541611790657, -0.013703437522053719, 0.1158340722322464, -0.06052921712398529, 0.03873966634273529, -0.07003012299537659, 0.09212459623813629, -0.07295085489749908, -0.3239504098892212, -0.005391482263803482, -0.10197418928146362, -0.15907429158687592, -0.013976894319057465, 0.03946356102824211, -0.01831761561334133, 0.034530527889728546, 0.05191340297460556, -0.019123520702123642, 0.16106142103672028, 0.013801123946905136, -0.06242940574884415, -0.09010743349790573, 0.0924910232424736, -0.0682554543018341, 0.24555043876171112, -0.011207583360373974, -0.002698245458304882, 0.08423158526420593, -0.01888737827539444, -0.19913725554943085, 0.011103067547082901, 0.07405013591051102, -0.07566535472869873, 0.048474498093128204, 0.18717791140079498, -0.007310176268219948, 0.0957917869091034, 
0.0624719075858593, 0.001095293089747429, 0.043067459017038345, 0.029391732066869736, 0.011367175728082657, -0.08590726554393768, 0.04510347545146942, -0.0757768452167511, 0.1404135674238205, 0.13760223984718323, -0.04059337452054024, 0.0215213093906641, -0.060027386993169785, 0.06886269152164459, -0.03064810298383236, 0.10949153453111649, 0.006196151487529278, -0.18755120038986206, 0.06090925633907318, 0.06332182139158249, 0.05691440403461456, -0.20941096544265747, -0.045611243695020676, 0.07739374041557312, -0.04925185814499855, -0.0012011770159006119, 0.12875767052173615, 0.031466830521821976, 0.01614225283265114, -0.05440744757652283, -0.030872996896505356, 0.016009122133255005, 0.10511709749698639, -0.08328288048505783, -0.029032893478870392 ]
a43f39ae0fffef1156302fc6a88f3215198862e7
# SEA-LION-Pile SEA-LION-Pile is the pretraining data set for SEA-LION, a collection of Large Language Models (LLMs) which has been pretrained and instruct-tuned for the Southeast Asia (SEA) region. This repository contains the cleaned mC4 portion of the SEA-LION-Pile. For the remainder of the SEA-LION-Pile dataset, they may be downloaded from the links provided below. ## Dataset Details SEA-LION was trained on 980B tokens of the following data: | Data Source | Unique Tokens | Multiplier | Total Tokens | Percentage | |---------------------------|:-------------:|:----------:|:------------:|:----------:| | RefinedWeb - English | 571.3B | 1 | 571.3B | 58.20% | | mC4 - Chinese | 91.2B | 1 | 91.2B | 9.29% | | mC4 - Indonesian | 3.68B | 4 | 14.7B | 1.50% | | mC4 - Malay | 0.72B | 4 | 2.9B | 0.29% | | mC4 - Filipino | 1.32B | 4 | 5.3B | 0.54% | | mC4 - Burmese | 1.2B | 4 | 4.9B | 0.49% | | mC4 - Vietnamese | 63.4B | 1 | 63.4B | 6.46% | | mC4 - Thai | 5.8B | 2 | 11.6B | 1.18% | | WangChanBERTa - Thai | 5B | 2 | 10B | 1.02% | | mC4 - Lao | 0.27B | 4 | 1.1B | 0.12% | | mC4 - Khmer | 0.97B | 4 | 3.9B | 0.40% | | mC4 - Tamil | 2.55B | 4 | 10.2B | 1.04% | | the Stack - Python | 20.9B | 2 | 41.8B | 4.26% | | the Stack - Javascript | 55.6B | 1 | 55.6B | 5.66% | | the Stack - Shell | 1.2B5 | 2 | 2.5B | 0.26% | | the Stack - SQL | 6.4B | 2 | 12.8B | 1.31% | | the Stack - Markdown | 26.6B | 1 | 26.6B | 2.71% | | RedPajama - StackExchange | 21.2B | 1 | 21.2B | 2.16% | | RedPajama - ArXiv | 30.6B | 1 | 30.6B | 3.12% | ### Additional SEA-LION-Pile (non-mC4) Data Sources This section contains the links to the additional datasets that form the SEA-LION-Pile. 
- [RefinedWeb](https://huggingface.co/datasets/tiiuae/falcon-refinedweb) - [the Stack (Python, Javascript, Shell, SQL, Markdown)](https://huggingface.co/datasets/bigcode/the-stack-dedup) - [RedPajama (StackExchange, ArXiv)](https://huggingface.co/datasets/togethercomputer/RedPajama-Data-1T) - WangChanBERTa - [scb_mt_enth_2020](https://huggingface.co/datasets/scb_mt_enth_2020) - [prachathai67k](https://huggingface.co/datasets/prachathai67k) - [thaisum](https://huggingface.co/datasets/thaisum) - [Opus - bible-uedin](https://opus.nlpl.eu/bible-uedin.php) - [Opus - Tanzil](https://opus.nlpl.eu/Tanzil.php) - [Opus - Opensubtitles](https://opus.nlpl.eu/OpenSubtitles-v2018.php) - [Opus - QED](https://opus.nlpl.eu/QED.php) - [Opus - Ted2020](https://opus.nlpl.eu/TED2020.php) - [Opus - Oscar](https://oscar-project.org/post/news-23-01) ### Limitations - As toxic or biased data is prevalent on the internet, it is likely our dataset contains such content. - Despite our best efforts to filter content that does not qualify as natural language, and to deduplicate documents, our pipeline may let through documents that may be considered as errors or redundant. ### License This public extract of mC4 is made available under [ODC-By 1.0](https://opendatacommons.org/licenses/by/1-0/) license; users should also abide to the [CommonCrawl ToU](https://commoncrawl.org/terms-of-use/). For all other licenses, please refer to their individual pages above. 
## References ```bibtex @misc{lowphansirikul2021wangchanberta, title={WangchanBERTa: Pretraining transformer-based Thai Language Models}, author={Lalita Lowphansirikul and Charin Polpanumas and Nawat Jantrakulchai and Sarana Nutanong}, year={2021}, eprint={2101.09635}, archivePrefix={arXiv}, primaryClass={cs.CL} } @article{refinedweb, title={The {R}efined{W}eb dataset for {F}alcon {LLM}: outperforming curated corpora with web data, and web data only}, author={Guilherme Penedo and Quentin Malartic and Daniel Hesslow and Ruxandra Cojocaru and Alessandro Cappelli and Hamza Alobeidli and Baptiste Pannier and Ebtesam Almazrouei and Julien Launay}, journal={arXiv preprint arXiv:2306.01116}, eprint={2306.01116}, eprinttype = {arXiv}, url={https://arxiv.org/abs/2306.01116}, year={2023} } @article{Kocetkov2022TheStack, title={The Stack: 3 TB of permissively licensed source code}, author={Kocetkov, Denis and Li, Raymond and Ben Allal, Loubna and Li, Jia and Mou,Chenghao and Muñoz Ferrandis, Carlos and Jernite, Yacine and Mitchell, Margaret and Hughes, Sean and Wolf, Thomas and Bahdanau, Dzmitry and von Werra, Leandro and de Vries, Harm}, journal={Preprint}, year={2022} } @software{together2023redpajama, author = {Together Computer}, title = {RedPajama: An Open Source Recipe to Reproduce LLaMA training dataset}, month = April, year = 2023, url = {https://github.com/togethercomputer/RedPajama-Data} } ```
aisingapore/sea-lion-pile
[ "language:zh", "language:vi", "language:id", "language:ms", "language:tl", "language:my", "language:th", "language:lo", "language:km", "language:ta", "license:other", "arxiv:2101.09635", "arxiv:2306.01116", "region:us" ]
2024-01-25T06:10:44+00:00
{"language": ["zh", "vi", "id", "ms", "tl", "my", "th", "lo", "km", "ta"], "license": "other", "license_name": "other", "license_link": "LICENSE"}
2024-02-16T03:26:11+00:00
[ "2101.09635", "2306.01116" ]
[ "zh", "vi", "id", "ms", "tl", "my", "th", "lo", "km", "ta" ]
TAGS #language-Chinese #language-Vietnamese #language-Indonesian #language-Malay (macrolanguage) #language-Tagalog #language-Burmese #language-Thai #language-Lao #language-Khmer #language-Tamil #license-other #arxiv-2101.09635 #arxiv-2306.01116 #region-us
SEA-LION-Pile ============= SEA-LION-Pile is the pretraining data set for SEA-LION, a collection of Large Language Models (LLMs) which has been pretrained and instruct-tuned for the Southeast Asia (SEA) region. This repository contains the cleaned mC4 portion of the SEA-LION-Pile. For the remainder of the SEA-LION-Pile dataset, they may be downloaded from the links provided below. Dataset Details --------------- SEA-LION was trained on 980B tokens of the following data: ### Additional SEA-LION-Pile (non-mC4) Data Sources This section contains the links to the additional datasets that form the SEA-LION-Pile. * RefinedWeb * the Stack (Python, Javascript, Shell, SQL, Markdown) * RedPajama (StackExchange, ArXiv) * WangChanBERTa + scb\_mt\_enth\_2020 + prachathai67k + thaisum + Opus - bible-uedin + Opus - Tanzil + Opus - Opensubtitles + Opus - QED + Opus - Ted2020 + Opus - Oscar ### Limitations * As toxic or biased data is prevalent on the internet, it is likely our dataset contains such content. * Despite our best efforts to filter content that does not qualify as natural language, and to deduplicate documents, our pipeline may let through documents that may be considered as errors or redundant. ### License This public extract of mC4 is made available under ODC-By 1.0 license; users should also abide to the CommonCrawl ToU. For all other licenses, please refer to their individual pages above. References ----------
[ "### Additional SEA-LION-Pile (non-mC4) Data Sources\n\n\nThis section contains the links to the additional datasets that form the SEA-LION-Pile.\n\n\n* RefinedWeb\n* the Stack (Python, Javascript, Shell, SQL, Markdown)\n* RedPajama (StackExchange, ArXiv)\n* WangChanBERTa\n\t+ scb\\_mt\\_enth\\_2020\n\t+ prachathai67k\n\t+ thaisum\n\t+ Opus - bible-uedin\n\t+ Opus - Tanzil\n\t+ Opus - Opensubtitles\n\t+ Opus - QED\n\t+ Opus - Ted2020\n\t+ Opus - Oscar", "### Limitations\n\n\n* As toxic or biased data is prevalent on the internet, it is likely our dataset contains such content.\n* Despite our best efforts to filter content that does not qualify as natural language, and to deduplicate documents, our pipeline may let through documents that may be considered as errors or redundant.", "### License\n\n\nThis public extract of mC4 is made available under ODC-By 1.0 license; users should also abide to the CommonCrawl ToU.\n\n\nFor all other licenses, please refer to their individual pages above.\n\n\nReferences\n----------" ]
[ "TAGS\n#language-Chinese #language-Vietnamese #language-Indonesian #language-Malay (macrolanguage) #language-Tagalog #language-Burmese #language-Thai #language-Lao #language-Khmer #language-Tamil #license-other #arxiv-2101.09635 #arxiv-2306.01116 #region-us \n", "### Additional SEA-LION-Pile (non-mC4) Data Sources\n\n\nThis section contains the links to the additional datasets that form the SEA-LION-Pile.\n\n\n* RefinedWeb\n* the Stack (Python, Javascript, Shell, SQL, Markdown)\n* RedPajama (StackExchange, ArXiv)\n* WangChanBERTa\n\t+ scb\\_mt\\_enth\\_2020\n\t+ prachathai67k\n\t+ thaisum\n\t+ Opus - bible-uedin\n\t+ Opus - Tanzil\n\t+ Opus - Opensubtitles\n\t+ Opus - QED\n\t+ Opus - Ted2020\n\t+ Opus - Oscar", "### Limitations\n\n\n* As toxic or biased data is prevalent on the internet, it is likely our dataset contains such content.\n* Despite our best efforts to filter content that does not qualify as natural language, and to deduplicate documents, our pipeline may let through documents that may be considered as errors or redundant.", "### License\n\n\nThis public extract of mC4 is made available under ODC-By 1.0 license; users should also abide to the CommonCrawl ToU.\n\n\nFor all other licenses, please refer to their individual pages above.\n\n\nReferences\n----------" ]
[ 83, 150, 75, 53 ]
[ "passage: TAGS\n#language-Chinese #language-Vietnamese #language-Indonesian #language-Malay (macrolanguage) #language-Tagalog #language-Burmese #language-Thai #language-Lao #language-Khmer #language-Tamil #license-other #arxiv-2101.09635 #arxiv-2306.01116 #region-us \n### Additional SEA-LION-Pile (non-mC4) Data Sources\n\n\nThis section contains the links to the additional datasets that form the SEA-LION-Pile.\n\n\n* RefinedWeb\n* the Stack (Python, Javascript, Shell, SQL, Markdown)\n* RedPajama (StackExchange, ArXiv)\n* WangChanBERTa\n\t+ scb\\_mt\\_enth\\_2020\n\t+ prachathai67k\n\t+ thaisum\n\t+ Opus - bible-uedin\n\t+ Opus - Tanzil\n\t+ Opus - Opensubtitles\n\t+ Opus - QED\n\t+ Opus - Ted2020\n\t+ Opus - Oscar### Limitations\n\n\n* As toxic or biased data is prevalent on the internet, it is likely our dataset contains such content.\n* Despite our best efforts to filter content that does not qualify as natural language, and to deduplicate documents, our pipeline may let through documents that may be considered as errors or redundant.### License\n\n\nThis public extract of mC4 is made available under ODC-By 1.0 license; users should also abide to the CommonCrawl ToU.\n\n\nFor all other licenses, please refer to their individual pages above.\n\n\nReferences\n----------" ]
[ -0.03212283179163933, 0.19933202862739563, -0.006876617670059204, 0.023275498300790787, 0.09798120707273483, -0.09528481215238571, -0.0020412078592926264, 0.11238057911396027, 0.07324754446744919, 0.04516061022877693, -0.02820742130279541, 0.05820576474070549, 0.08482219278812408, 0.04357942193746567, -0.019211018458008766, -0.23910865187644958, 0.03266581892967224, -0.03992848098278046, 0.017444103956222534, 0.09728727489709854, 0.0964261069893837, 0.009870246984064579, 0.09345915913581848, 0.035671401768922806, 0.08854351937770844, 0.07067607343196869, -0.011819083243608475, -0.03179924562573433, 0.005793461576104164, 0.04458252340555191, 0.030034950003027916, 0.016602061688899994, -0.046338099986314774, -0.21113868057727814, 0.009697429835796356, 0.059412773698568344, -0.02672674134373665, -0.03532502055168152, 0.15692347288131714, 0.0065661100670695305, 0.1139901727437973, -0.130430206656456, -0.0010502979857847095, 0.0946379229426384, -0.07551883161067963, -0.16992203891277313, -0.09090932458639145, 0.0373455248773098, 0.10551230609416962, 0.13222450017929077, -0.1002245843410492, 0.18584443628787994, -0.057909559458494186, 0.08308925479650497, 0.057945724576711655, -0.2676751911640167, -0.005273597780615091, -0.010271367616951466, 0.09074077010154724, 0.049527522176504135, -0.05620317533612251, -0.005661063361912966, 0.012450147420167923, -0.00433536758646369, -0.05914164334535599, -0.10728787630796432, 0.06276072561740875, -0.07870638370513916, -0.09766298532485962, 0.0039323484525084496, 0.11807838082313538, 0.014777946285903454, -0.09233810752630234, -0.0729270949959755, -0.04403999075293541, -0.05470062419772148, 0.03853631392121315, 0.0039035663940012455, 0.03072565235197544, 0.07988352328538895, 0.2118188589811325, -0.06288490444421768, -0.10593929141759872, 0.04005226865410805, -0.029642852023243904, 0.043634671717882156, 0.05570796877145767, 0.019036928191781044, -0.005832316819578409, 0.03992366045713425, -0.017033590003848076, 
-0.11624912917613983, -0.04705434292554855, -0.06711414456367493, -0.1096625030040741, 0.07202662527561188, 0.052598003298044205, 0.0215586107224226, 0.03131745010614395, 0.15227003395557404, -0.0683792307972908, 0.0926780104637146, -0.08201444894075394, 0.0327109768986702, 0.021920841187238693, 0.09878946840763092, -0.0714573934674263, 0.037504762411117554, 0.0757301077246666, -0.06488139182329178, 0.038081321865320206, 0.04411398991942406, 0.024594346061348915, 0.011423611082136631, -0.02874051220715046, 0.11220494657754898, 0.04326580837368965, 0.056022223085165024, -0.09031763672828674, -0.03907829895615578, 0.22636020183563232, -0.15023131668567657, 0.03050493635237217, 0.031068770214915276, -0.03170790523290634, 0.08266511559486389, 0.041864171624183655, 0.058775655925273895, -0.12545523047447205, 0.06589946150779724, 0.009222808293998241, 0.04605100303888321, -0.047411199659109116, -0.07374877482652664, 0.02299831248819828, -0.05878010392189026, -0.059091635048389435, -0.0631311759352684, -0.06421646475791931, -0.05483032390475273, -0.008847621269524097, -0.02140101045370102, -0.04050306975841522, 0.08557835966348648, -0.01630365289747715, -0.03712859004735947, 0.028858637437224388, -0.0874384343624115, -0.07777876406908035, 0.03493959829211235, -0.010465922765433788, 0.027118362486362457, -0.013210313394665718, 0.002810038859024644, -0.10288523137569427, 0.05512648820877075, -0.0909697487950325, 0.11638876795768738, -0.1403006613254547, 0.0017445054836571217, -0.08776912838220596, -0.04660899564623833, 0.004803384654223919, 0.01847207546234131, -0.06606487929821014, 0.09609081596136093, -0.20818297564983368, -0.0012876298278570175, 0.2079618275165558, -0.15193237364292145, -0.021331027150154114, 0.16466577351093292, 0.038072772324085236, -0.06736351549625397, 0.017520684748888016, 0.09498299658298492, 0.0826360285282135, -0.16225430369377136, -0.06930167973041534, -0.02012539468705654, -0.024541597813367844, 0.15707847476005554, 0.11263014376163483, 
-0.07188715785741806, 0.1312701404094696, 0.004544900264590979, -0.012879074551165104, -0.04933447018265724, 0.04471099376678467, -0.0705893263220787, -0.007790581788867712, 0.004220911767333746, -0.08722541481256485, 0.005267397500574589, -0.013264607638120651, -0.023138223215937614, -0.0937875360250473, -0.028948362916707993, 0.06540466099977493, 0.032249875366687775, 0.026686346158385277, -0.1211957260966301, 0.015370785258710384, 0.08061452955007553, 0.0012016575783491135, -0.17581695318222046, 0.0278251264244318, 0.04742185026407242, -0.09423884749412537, 0.09741698205471039, -0.023478126153349876, -0.0030753668397665024, 0.025035684928297997, -0.021672088652849197, -0.004100358113646507, 0.05705837160348892, -0.022236816585063934, -0.000767452351283282, -0.07664627581834793, 0.07688876241445541, -0.012827380560338497, 0.1660081446170807, -0.18601630628108978, 0.014071804471313953, 0.18901830911636353, 0.12774153053760529, 0.03236651420593262, -0.02196786366403103, 0.09014231711626053, 0.047807492315769196, 0.02989955246448517, -0.040180888026952744, -0.010721526108682156, 0.0045383949764072895, -0.07902852445840836, 0.1388050615787506, -0.1293717473745346, 0.058279406279325485, 0.12023334950208664, -0.04860479012131691, -0.03760514035820961, 0.02922176942229271, -0.038598909974098206, -0.05592355504631996, -0.036522381007671356, 0.003038740949705243, 0.14144539833068848, 0.027835743501782417, 0.09309512376785278, -0.09198258817195892, -0.01052532996982336, -0.02832251414656639, -0.09275741130113602, 0.0027137829456478357, 0.15509824454784393, 0.045007940381765366, -0.28713956475257874, 0.1102604791522026, -0.03600478544831276, 0.009893213398754597, 0.13409188389778137, -0.03153824433684349, -0.05075746774673462, 0.0020955910440534353, 0.13658873736858368, 0.0636839047074318, -0.018233751878142357, 0.0033955350518226624, 0.047844626009464264, 0.03703117370605469, -0.0017068097367882729, 0.011238389648497105, -0.13301508128643036, -0.0028396262787282467, 
-0.020137742161750793, -0.03514048829674721, -0.02406824566423893, 0.07314243167638779, 0.0175650455057621, 0.04334452748298645, -0.06143384054303169, 0.09399399161338806, -0.021213645115494728, -0.05202126130461693, -0.1208135113120079, 0.11817146092653275, -0.14813971519470215, -0.3231205940246582, -0.09537497907876968, -0.013953734189271927, -0.058709390461444855, -0.01914409175515175, 0.09899482131004333, -0.11105500906705856, -0.12775927782058716, -0.08077108860015869, -0.09099812060594559, 0.005637831520289183, -0.10504622012376785, -0.1388949304819107, 0.0677112564444542, 0.05427112430334091, -0.07165142893791199, -0.006105997134000063, 0.04124324396252632, -0.10925494879484177, 0.04285065829753876, -0.06192003935575485, 0.05785171315073967, 0.014222190715372562, 0.025504887104034424, -0.02442682534456253, -0.00048005959251895547, 0.18125192821025848, -0.10833455622196198, 0.024714501574635506, 0.13576996326446533, -0.05156082659959793, 0.11642184853553772, 0.14724387228488922, 0.03290187940001488, -0.04316042363643646, 0.04458381608128548, 0.1283293068408966, 0.0038964725099503994, -0.32014116644859314, -0.09049834311008453, -0.08820366114377975, 0.0062572951428592205, -0.02740943618118763, 0.11426104605197906, -0.01201796717941761, 0.014410111121833324, -0.12280990928411484, 0.07537747919559479, 0.05657330900430679, 0.07839860767126083, 0.12584687769412994, 0.03935752063989639, 0.06479205191135406, -0.0829806700348854, 0.013655804097652435, 0.11201263964176178, 0.04000668600201607, 0.1533135026693344, 0.06359757483005524, 0.17929430305957794, 0.11389081180095673, 0.16800521314144135, 0.09423834085464478, -0.07836245745420456, -0.017363466322422028, 0.04258812218904495, 0.004766262602061033, -0.1260228008031845, 0.0477324016392231, 0.07850538939237595, 0.02916734851896763, -0.06120137497782707, 0.06712768971920013, 0.09025881439447403, 0.06648682057857513, 0.19634686410427094, -0.06736622005701065, 0.01651468127965927, 0.04024490714073181, 
0.02746022492647171, -0.005861181765794754, -0.02615169994533062, 0.047246143221855164, 0.01345142349600792, -0.14078015089035034, 0.13736121356487274, -0.01456989161670208, 0.13450506329536438, -0.1253460943698883, -0.003065982135012746, 0.007730606943368912, -0.04685936123132706, 0.021737169474363327, 0.08527297526597977, -0.1658657044172287, 0.19360853731632233, 0.035996537655591965, -0.00033620893373154104, -0.014209708198904991, 0.022072549909353256, 0.0448177233338356, 0.05959325283765793, 0.13306646049022675, 0.02974518947303295, -0.056329768151044846, -0.09939806163311005, -0.08070556074380875, 0.008911608718335629, 0.08504818379878998, 0.01601456105709076, 0.02459581196308136, 0.0011931865010410547, -0.0343816876411438, -0.096043661236763, 0.05292750522494316, -0.21434010565280914, -0.14971064031124115, 0.09429889917373657, -0.008750771172344685, -0.014538770541548729, -0.03751876577734947, -0.021216729655861855, -0.027578003704547882, 0.2508225739002228, -0.20554175972938538, -0.12836109101772308, -0.0876816138625145, -0.11518603563308716, 0.10245831310749054, -0.08574581146240234, 0.05429499223828316, -0.03369225189089775, 0.023788997903466225, 0.005302914883941412, 0.007662807125598192, 0.10687847435474396, -0.03472069278359413, -0.14214766025543213, -0.026126578450202942, 0.13785651326179504, -0.014938808977603912, 0.03274960815906525, -0.000792940438259393, 0.008881093934178352, 0.006800077389925718, -0.16301654279232025, -0.04736148566007614, 0.03637486696243286, 0.057234399020671844, 0.1388663351535797, -0.07298565655946732, -0.17124441266059875, -0.1466769129037857, -0.15185053646564484, 0.03780762851238251, 0.24194510281085968, -0.020123589783906937, 0.08093573898077011, 0.10806453227996826, -0.08803801238536835, -0.11013898253440857, -0.08147116005420685, 0.019699709489941597, -0.013821450993418694, 0.03982994332909584, -0.15252164006233215, 0.009317928925156593, 0.10810288786888123, -0.029907794669270515, 0.014679902233183384, 
-0.12359875440597534, -0.12555280327796936, 0.030299223959445953, -0.002609338378533721, -0.1369408369064331, -0.21842755377292633, -0.11940012127161026, -0.04681248217821121, -0.22640222311019897, 0.10801781713962555, -0.10147865861654282, 0.034205541014671326, 0.03414043411612511, -0.056348226964473724, 0.009522764943540096, 0.0005869735032320023, 0.19037027657032013, -0.07971318811178207, 0.003208950161933899, -0.08344050496816635, 0.012880833819508553, -0.05434264987707138, 0.04627669230103493, 0.07976236939430237, -0.05320729315280914, -0.0063562472350895405, -0.15341627597808838, -0.044502973556518555, -0.026413237676024437, 0.034271325916051865, -0.038140635937452316, -0.034599948674440384, -0.07130750268697739, 0.06939133256673813, 0.04656452685594559, 0.004756342153996229, 0.06487659364938736, -0.08158014714717865, -0.067753367125988, 0.1516781449317932, 0.14610061049461365, 0.07882367074489594, -0.09864737838506699, -0.09103180468082428, -0.015102897770702839, 0.06740380823612213, -0.02507045306265354, 0.02875468321144581, 0.09277313202619553, -0.0014673796249553561, 0.15586882829666138, -0.05051460117101669, -0.12417285144329071, 0.09404224902391434, 0.043504923582077026, -0.06522940844297409, -0.13628193736076355, -0.02950785681605339, 0.051596496254205704, 0.09246718883514404, -0.10136085003614426, 0.054953109472990036, -0.0507294200360775, -0.03464437276124954, 0.05534753575921059, 0.03081517107784748, 0.008871281519532204, 0.07429283857345581, -0.02306276559829712, 0.03214189410209656, -0.07705315202474594, 0.11269478499889374, 0.1388758420944214, -0.042486898601055145, -0.015072175301611423, 0.21517157554626465, -0.09969452023506165, -0.05760636180639267, -0.11202444136142731, -0.003319493727758527, 0.08385695517063141, -0.06627096980810165, 0.006579107604920864, -0.06637603044509888, -0.026926640421152115, 0.025373706594109535, 0.022350216284394264, 0.02587313763797283, 0.042580269277095795, -0.08181900531053543, 0.016716567799448967, 
0.03568254038691521, 0.1027718111872673, -0.039894118905067444, -0.1096668541431427, -0.00941547006368637, 0.09844611585140228, 0.014845103025436401, 0.01821797527372837, -0.04332877695560455, -0.10275960713624954, 0.001981993205845356, -0.16551581025123596, 0.1428869068622589, -0.06757477670907974, 0.0018631775164976716, -0.055309318006038666, -0.07237420976161957, -0.055421072989702225, 0.013145914301276207, -0.05271616205573082, -0.058251719921827316, -0.026100678369402885, 0.09530892223119736, -0.14456957578659058, -0.04880506917834282, 0.13869839906692505, -0.015190777368843555, 0.035566214472055435, -0.002486460842192173, -0.08934333920478821, 0.0031124872621148825, -0.08689317852258682, 0.015515862964093685, 0.08547604084014893, 0.05473848804831505, 0.022716879844665527, -0.074898362159729, 0.0037767046596854925, 0.029041385278105736, -0.008409041911363602, 0.001150169293396175, 0.09031183272600174, -0.14264816045761108, -0.09376876801252365, -0.038070835173130035, -0.0508204810321331, -0.030671915039420128, 0.014461319893598557, 0.08293934166431427, 0.053097501397132874, 0.08364196866750717, -0.06380292773246765, 0.06117886304855347, -0.11559412628412247, -0.014187504537403584, 0.005828713066875935, 0.01951960101723671, -0.0008174236863851547, 0.005407021846622229, 0.052109699696302414, -0.009671326726675034, 0.18684542179107666, -0.004130840301513672, 0.050461530685424805, 0.07079820334911346, -0.035087499767541885, -0.04493601992726326, 0.016286740079522133, 0.05604276433587074, 0.059479039162397385, 0.024374213069677353, -0.0038204265292733908, -0.12606561183929443, -0.07995850592851639, 0.0358770377933979, 0.010219869203865528, 0.15121647715568542, 0.10635094344615936, 0.0058198790065944195, -0.0013125857803970575, -0.02790250815451145, -0.12496346980333328, -0.09507932513952255, -0.02773275226354599, 0.06530701369047165, -0.04915627837181091, 0.050065264105796814, 0.1780387908220291, -0.10683737695217133, 0.09724459797143936, 0.02175006829202175, 
-0.044093966484069824, -0.1462426781654358, -0.20195461809635162, -0.06016974151134491, 0.05823097005486488, 0.00005181342930882238, -0.0772440955042839, 0.11292527616024017, -0.04503459855914116, 0.07393357157707214, -0.033617254346609116, 0.08011674880981445, -0.1096322312951088, -0.14412565529346466, 0.037808261811733246, 0.009760894812643528, 0.05221162363886833, 0.023529091849923134, 0.06671084463596344, 0.017287451773881912, -0.03755199536681175, -0.01141447015106678, 0.09823869168758392, -0.03829274699091911, 0.04787600412964821, -0.0826636403799057, -0.05404524505138397, 0.003224174492061138, 0.03311678394675255, 0.05546825751662254, 0.25136205554008484, 0.04789123311638832, -0.0015053650131449103, 0.03448902443051338, 0.1667134314775467, 0.022426635026931763, -0.06984803080558777, -0.09474047273397446, 0.11550731211900711, 0.01786934584379196, -0.03849705308675766, 0.022586120292544365, -0.1171167641878128, 0.04242555424571037, 0.19331544637680054, 0.0874686911702156, -0.020799346268177032, 0.056227438151836395, -0.01718725450336933, 0.019376352429389954, -0.0334242507815361, 0.04058678820729256, 0.05119365081191063, 0.33328449726104736, -0.06704498827457428, 0.039669252932071686, -0.050997622311115265, -0.03306601196527481, -0.20458845794200897, -0.02278858982026577, 0.03476504981517792, 0.00771677540615201, -0.021374432370066643, 0.12107021361589432, -0.10999669879674911, -0.1337195187807083, -0.03160840645432472, -0.025907162576913834, -0.10441386699676514, -0.01524918619543314, -0.07577978819608688, 0.1228422075510025, -0.007058437913656235, 0.011576790362596512, -0.030231066048145294, 0.106867216527462, 0.044016022235155106, -0.06526878476142883, -0.015077107585966587, 0.07027847319841385, 0.013055550865828991, 0.10305599868297577, 0.053592193871736526, 0.16454407572746277, 0.06298848986625671, 0.017334595322608948, -0.07867499440908432, 0.08411969244480133, 0.024733470752835274, -0.0992702767252922, -0.026587756350636482, 0.10367952287197113, 
-0.010355964303016663, -0.0008850741432979703, 0.07124226540327072, 0.14395681023597717, 0.0734843909740448, 0.028889061883091927, 0.05727190896868706, -0.07650471478700638, 0.14808787405490875, -0.173323854804039, 0.033945631235837936, 0.1727556437253952, -0.008110223338007927, 0.015944991260766983, -0.04249660670757294, 0.0237924475222826, -0.03924887254834175, -0.0910600796341896, -0.014356665313243866, -0.07979511469602585, 0.08406514674425125, 0.12483694404363632, 0.10996778309345245, -0.08961957693099976, -0.05586615577340126, -0.00877667311578989, -0.003239062614738941, -0.09104248881340027, 0.07683950662612915, -0.022926488891243935, 0.02144414186477661, -0.027784859761595726, -0.3159344792366028, 0.011587082408368587, 0.031969234347343445, -0.08173198997974396, -0.03493683785200119 ]
8a8d9c076bc30f2366accfa4dda4a635d6c4b9bb
# Airavata HumanEval Prompts This benchmark contains a set of prompts written by real-users to evaluate LLMs on real-world tasks and test it for different abilities. We collect prompts for 5 abilities listed below: - Long: Ability to generate long-form text like writing essays, speeches, reports, etc. - Fact-Ops: Ability to give factual opinions and explanations like seeking recommendations, seeking advice, opinions, explanations, etc. - Content: Ability to make content accessible like summarizations, layman explanations, etc - Lang-Creativity: Ability to be creative in language like finding anagrams, rhyming words, vocabulary enhancement, etc - Culture: Ability to answer questions related to Indian Culture. For each ability we define a list of intents and domains which are provided to the users along with detailed instructions about what prompts are expected. We recommend the readers to check out our [official blog post](https://ai4bharat.github.io/airavata) for more details. ## Citation ```bibtex @misc{airavata2024, title = {Introducing Airavata: Hindi Instruction-tuned LLM}, url = {https://ai4bharat.github.io/airavata}, author = {Jay Gala and Thanmay Jayakumar and Jaavid Aktar Husain and Aswanth Kumar and Mohammed Safi Ur Rahman Khan and Diptesh Kanojia and Ratish Puduppully and Mitesh Khapra and Raj Dabre and Rudra Murthy and Anoop Kunchukuttan}, month = {January}, year = {2024} } ```
ai4bharat/human-eval
[ "task_categories:text2text-generation", "annotations_creators:expert-generated", "language_creators:expert-generated", "multilinguality:monolingual", "size_categories:1<n<100", "source_datasets:original", "language:hi", "license:cc-by-4.0", "region:us" ]
2024-01-25T06:16:47+00:00
{"annotations_creators": ["expert-generated"], "language_creators": ["expert-generated"], "language": ["hi"], "license": ["cc-by-4.0"], "multilinguality": ["monolingual"], "size_categories": ["1<n<100"], "source_datasets": ["original"], "task_categories": ["text2text-generation"], "task_ids": [], "pretty_name": "Airavata HumanEval", "language_bcp47": ["hi-IN"], "dataset_info": [{"config_name": "human-eval", "features": [{"name": "id", "dtype": "string"}, {"name": "intent", "dtype": "string"}, {"name": "domain", "dtype": "string"}, {"name": "language", "dtype": "string"}, {"name": "prompt", "dtype": "string"}], "splits": [{"name": "test", "num_bytes": 34114, "num_examples": 50}], "download_size": 21873, "dataset_size": 34114}], "configs": [{"config_name": "human-eval", "data_files": [{"split": "test", "path": "data/test-*"}]}]}
2024-01-25T07:01:45+00:00
[]
[ "hi" ]
TAGS #task_categories-text2text-generation #annotations_creators-expert-generated #language_creators-expert-generated #multilinguality-monolingual #size_categories-1<n<100 #source_datasets-original #language-Hindi #license-cc-by-4.0 #region-us
# Airavata HumanEval Prompts This benchmark contains a set of prompts written by real-users to evaluate LLMs on real-world tasks and test it for different abilities. We collect prompts for 5 abilities listed below: - Long: Ability to generate long-form text like writing essays, speeches, reports, etc. - Fact-Ops: Ability to give factual opinions and explanations like seeking recommendations, seeking advice, opinions, explanations, etc. - Content: Ability to make content accessible like summarizations, layman explanations, etc - Lang-Creativity: Ability to be creative in language like finding anagrams, rhyming words, vocabulary enhancement, etc - Culture: Ability to answer questions related to Indian Culture. For each ability we define a list of intents and domains which are provided to the users along with detailed instructions about what prompts are expected. We recommend the readers to check out our official blog post for more details.
[ "# Airavata HumanEval Prompts\n\nThis benchmark contains a set of prompts written by real-users to evaluate LLMs on real-world tasks and test it for different abilities. We collect prompts for 5 abilities listed below:\n- Long: Ability to generate long-form text like writing essays, speeches, reports, etc.\n- Fact-Ops: Ability to give factual opinions and explanations like seeking recommendations, seeking advice, opinions, explanations, etc.\n- Content: Ability to make content accessible like summarizations, layman explanations, etc\n- Lang-Creativity: Ability to be creative in language like finding anagrams, rhyming words, vocabulary enhancement, etc\n- Culture: Ability to answer questions related to Indian Culture.\n\nFor each ability we define a list of intents and domains which are provided to the users along with detailed instructions about what prompts are expected.\n\nWe recommend the readers to check out our official blog post for more details." ]
[ "TAGS\n#task_categories-text2text-generation #annotations_creators-expert-generated #language_creators-expert-generated #multilinguality-monolingual #size_categories-1<n<100 #source_datasets-original #language-Hindi #license-cc-by-4.0 #region-us \n", "# Airavata HumanEval Prompts\n\nThis benchmark contains a set of prompts written by real-users to evaluate LLMs on real-world tasks and test it for different abilities. We collect prompts for 5 abilities listed below:\n- Long: Ability to generate long-form text like writing essays, speeches, reports, etc.\n- Fact-Ops: Ability to give factual opinions and explanations like seeking recommendations, seeking advice, opinions, explanations, etc.\n- Content: Ability to make content accessible like summarizations, layman explanations, etc\n- Lang-Creativity: Ability to be creative in language like finding anagrams, rhyming words, vocabulary enhancement, etc\n- Culture: Ability to answer questions related to Indian Culture.\n\nFor each ability we define a list of intents and domains which are provided to the users along with detailed instructions about what prompts are expected.\n\nWe recommend the readers to check out our official blog post for more details." ]
[ 82, 218 ]
[ "passage: TAGS\n#task_categories-text2text-generation #annotations_creators-expert-generated #language_creators-expert-generated #multilinguality-monolingual #size_categories-1<n<100 #source_datasets-original #language-Hindi #license-cc-by-4.0 #region-us \n# Airavata HumanEval Prompts\n\nThis benchmark contains a set of prompts written by real-users to evaluate LLMs on real-world tasks and test it for different abilities. We collect prompts for 5 abilities listed below:\n- Long: Ability to generate long-form text like writing essays, speeches, reports, etc.\n- Fact-Ops: Ability to give factual opinions and explanations like seeking recommendations, seeking advice, opinions, explanations, etc.\n- Content: Ability to make content accessible like summarizations, layman explanations, etc\n- Lang-Creativity: Ability to be creative in language like finding anagrams, rhyming words, vocabulary enhancement, etc\n- Culture: Ability to answer questions related to Indian Culture.\n\nFor each ability we define a list of intents and domains which are provided to the users along with detailed instructions about what prompts are expected.\n\nWe recommend the readers to check out our official blog post for more details." ]
[ -0.048036862164735794, -0.09082859009504318, -0.0026299855671823025, 0.0621207021176815, 0.09088636934757233, -0.025395788252353668, 0.029648033902049065, 0.07298415899276733, 0.08256926387548447, 0.09471756219863892, -0.04701331630349159, -0.07311699539422989, 0.033370859920978546, 0.04466933012008667, 0.040080517530441284, -0.16316883265972137, 0.020869337022304535, -0.09579736739397049, 0.11251577734947205, 0.10758870840072632, 0.08133972436189651, -0.041134875267744064, 0.019805574789643288, -0.005187340546399355, 0.01348730456084013, 0.018818866461515427, -0.03706209734082222, -0.07201091945171356, 0.05870578810572624, -0.0034119777847081423, -0.011086685582995415, 0.016125744208693504, -0.04584510251879692, -0.2213503122329712, 0.02388087660074234, 0.009341606870293617, 0.019972888752818108, -0.055172890424728394, 0.01819753088057041, -0.06128576025366783, 0.07762543857097626, -0.02299112267792225, 0.02768237330019474, 0.11266978085041046, -0.13351619243621826, -0.026900310069322586, -0.0234077125787735, -0.004402087535709143, 0.06943800300359726, 0.14767730236053467, -0.05963535234332085, 0.05096971243619919, -0.1156756728887558, -0.03434848040342331, 0.16212111711502075, -0.1308583915233612, -0.02157474122941494, 0.007187843322753906, 0.13377739489078522, 0.13367021083831787, -0.12076931446790695, 0.015189879573881626, 0.03174503520131111, -0.009580455720424652, 0.015063774771988392, -0.06155625730752945, 0.011188452132046223, -0.0017670138040557504, -0.06490734964609146, -0.0208264347165823, 0.18918851017951965, 0.0009059262811206281, -0.0956059917807579, -0.1968153417110443, 0.023848051205277443, 0.1185920462012291, -0.049548596143722534, 0.014110533520579338, -0.010877957567572594, 0.015527306124567986, 0.16155853867530823, -0.11824040859937668, -0.11837568134069443, -0.031847357749938965, 0.088124118745327, 0.04218091070652008, 0.031303275376558304, -0.00641948776319623, -0.07727563381195068, 0.042624156922101974, -0.009504726156592369, 
-0.10676998645067215, -0.1021459698677063, -0.061186533421278, -0.03302901238203049, 0.03306707739830017, -0.037516843527555466, -0.05403294041752815, 0.039136480540037155, 0.06596976518630981, 0.15451692044734955, 0.0297544002532959, -0.07954586297273636, 0.05155884847044945, 0.029860634356737137, 0.05250665545463562, 0.05210531875491142, -0.004593151621520519, 0.05520046874880791, 0.05392708256840706, 0.03446974977850914, -0.02759050950407982, -0.022995879873633385, 0.03360588476061821, -0.03417622670531273, 0.09858087450265884, 0.02956657111644745, -0.010378433391451836, -0.0976627990603447, -0.0038142800331115723, 0.09854844957590103, -0.12651747465133667, 0.012697873637080193, 0.02259823866188526, -0.12774865329265594, -0.06598072499036789, -0.021995894610881805, -0.04109092056751251, -0.10491486638784409, -0.03213023766875267, -0.02207905612885952, 0.04070526361465454, -0.08603890240192413, -0.07669740170240402, 0.02579713612794876, -0.06501232832670212, -0.03008381649851799, -0.06900541484355927, -0.2256852239370346, -0.09384508430957794, 0.021786483004689217, -0.11256812512874603, -0.021314173936843872, -0.020221853628754616, 0.004052143543958664, -0.0031669456511735916, -0.0021442535798996687, -0.10885543376207352, -0.07257626950740814, 0.06108570098876953, 0.031637392938137054, 0.0788082554936409, 0.026120208203792572, 0.04886288568377495, -0.09149232506752014, 0.007629428990185261, -0.08341243118047714, 0.07420521974563599, -0.11524204164743423, 0.0514708049595356, -0.06816728413105011, 0.019419126212596893, 0.025445258244872093, 0.042389512062072754, -0.0788867324590683, 0.16439899802207947, -0.07225728780031204, -0.016430873423814774, 0.0844726413488388, -0.10537099838256836, -0.12082329392433167, 0.10174804925918579, 0.013011899776756763, 0.20769603550434113, 0.07527297735214233, 0.07006659358739853, 0.14436495304107666, -0.09325656294822693, 0.055568501353263855, -0.1474359780550003, 0.027560334652662277, 0.0656963363289833, 0.01840488612651825, 
-0.018827328458428383, 0.12544818222522736, 0.015495635569095612, 0.010472076945006847, -0.0342135913670063, 0.04981110617518425, -0.07479682564735413, -0.03787229582667351, -0.048577722162008286, -0.024104053154587746, -0.03453350067138672, 0.008206390775740147, -0.059418752789497375, -0.038247477263212204, -0.10656589269638062, -0.0022639993112534285, -0.013392317108809948, 0.06590885668992996, -0.10095131397247314, 0.06003481522202492, 0.0788736566901207, 0.030655300244688988, -0.14995038509368896, -0.05182474106550217, 0.061046406626701355, -0.04117317125201225, 0.15056781470775604, 0.08661958575248718, 0.02153966762125492, -0.04677775874733925, 0.01539588626474142, 0.018874816596508026, -0.08482304215431213, -0.05061639845371246, -0.035778116434812546, -0.04752988740801811, 0.09841634333133698, -0.035149842500686646, 0.11006975173950195, -0.13613176345825195, 0.02697073109447956, 0.016896022483706474, -0.02298983931541443, 0.06766042113304138, -0.006979331374168396, 0.09883985668420792, 0.03527744486927986, 0.015673870220780373, 0.02674338035285473, 0.015053552575409412, -0.058314498513936996, -0.07559913396835327, 0.06269194185733795, -0.11474086344242096, -0.08204346895217896, 0.0027869499754160643, -0.027933679521083832, -0.026369797065854073, -0.014262114651501179, 0.043317172676324844, -0.05611732602119446, -0.09545302391052246, -0.10095012187957764, 0.11396566033363342, 0.020289605483412743, 0.07044018059968948, -0.06323728710412979, -0.035347118973731995, -0.03492475673556328, -0.017783155664801598, 0.0195373073220253, 0.07866308838129044, -0.0031956348102539778, 0.06214611232280731, 0.054393962025642395, 0.12756061553955078, -0.01216866821050644, 0.09866456687450409, 0.03256775066256523, -0.10845736414194107, 0.08433376252651215, 0.08187012374401093, -0.05560467019677162, 0.1012626439332962, -0.13135160505771637, -0.033129822462797165, 0.04038848355412483, 0.020879432559013367, 0.012261751107871532, -0.07273827493190765, 0.008740134537220001, 
0.0036631328985095024, -0.05958811193704605, -0.019816674292087555, 0.025803860276937485, 0.025558248162269592, 0.08341393619775772, 0.01683199778199196, 0.05655839666724205, 0.01853523962199688, -0.08796221017837524, -0.1168215274810791, 0.0721830502152443, -0.04966166242957115, -0.3073294460773468, -0.10258509963750839, -0.005396503489464521, -0.029373491182923317, 0.00037661311216652393, 0.08660388737916946, -0.08836052566766739, -0.09423424303531647, -0.1301158219575882, 0.13963234424591064, 0.0724249854683876, -0.11058983206748962, -0.10650504380464554, 0.02424134686589241, -0.01786138117313385, -0.07568421214818954, 0.045383524149656296, 0.0567844919860363, -0.049550145864486694, 0.062380846589803696, 0.026122592389583588, 0.09161874651908875, 0.07838291674852371, 0.07028783112764359, -0.06134437397122383, -0.035781458020210266, 0.19723019003868103, -0.1428108513355255, 0.11130081862211227, 0.265337198972702, 0.0002823486865963787, 0.05223395302891731, 0.2257792353630066, 0.010925689712166786, -0.08565709739923477, 0.1156807467341423, 0.039368655532598495, -0.0017762590432539582, -0.15330572426319122, -0.07421354204416275, -0.11063194274902344, -0.03247937560081482, 0.03168589621782303, 0.0905652642250061, -0.006351129151880741, 0.042810630053281784, -0.08601432293653488, -0.01587393321096897, 0.05491809919476509, 0.06931180506944656, 0.1728747934103012, 0.007345335092395544, 0.03147248551249504, -0.04921892285346985, -0.0020149569027125835, 0.06252457201480865, 0.06886277347803116, 0.24690866470336914, 0.029341911897063255, 0.226716086268425, 0.04409514740109444, 0.06463402509689331, 0.02545119635760784, -0.007123250048607588, -0.03564691171050072, 0.003683192189782858, -0.006170677486807108, -0.02540902979671955, -0.041784655302762985, 0.10818713903427124, 0.024874472990632057, -0.1089378371834755, -0.0070329997688531876, -0.1109548881649971, 0.061995524913072586, 0.06716615706682205, 0.054618917405605316, 0.02791932411491871, -0.012537376023828983, 
0.07875559478998184, -0.08613060414791107, -0.09483002871274948, 0.03289696201682091, 0.07708185166120529, -0.13405722379684448, -0.03553588688373566, 0.005062404554337263, 0.14066839218139648, 0.01910116896033287, -0.02944270521402359, -0.08466853201389313, -0.09696003794670105, 0.025292381644248962, 0.10914536565542221, -0.19532056152820587, 0.10524234920740128, 0.017888840287923813, 0.01987934298813343, -0.14868831634521484, -0.03147929534316063, 0.014644920825958252, -0.08234113454818726, 0.08787646889686584, -0.026612868532538414, -0.042798686772584915, -0.022705670446157455, -0.044508568942546844, 0.03531439229846001, 0.11764691770076752, -0.09506507217884064, 0.12077192217111588, -0.00457486417144537, 0.04350084438920021, -0.0758497342467308, -0.028700903058052063, -0.15913738310337067, -0.15434309840202332, 0.05068223178386688, 0.041673991829156876, 0.0866367444396019, -0.07214611768722534, -0.0022281722631305456, 0.03265983983874321, 0.08609049022197723, -0.1301812380552292, -0.10604792088270187, -0.08652616292238235, -0.03609829396009445, 0.007127557881176472, -0.033997148275375366, 0.04996751993894577, 0.05311036854982376, 0.05910966172814369, 0.00004954243922838941, -0.0005072798230685294, 0.058191556483507156, -0.03607244789600372, -0.11874455213546753, -0.044131599366664886, 0.1300864964723587, 0.1020398661494255, 0.04317713528871536, 0.0024875968229025602, -0.015000277198851109, -0.05647601559758186, -0.0922851413488388, -0.06842315942049026, -0.013200190849602222, -0.10194294154644012, 0.14156895875930786, -0.09688666462898254, -0.05318661034107208, -0.13268467783927917, -0.10517961531877518, 0.1020505428314209, 0.1302664428949356, 0.020668307319283485, 0.20966342091560364, 0.07414314895868301, -0.04327636584639549, -0.23343944549560547, 0.00022165878908708692, -0.03733568266034126, 0.035509299486875534, -0.0009856490651145577, -0.20793914794921875, 0.1001703143119812, -0.006277356296777725, -0.019161811098456383, 0.1625470370054245, 
-0.20371012389659882, -0.1142369955778122, 0.01681632176041603, 0.04068133234977722, 0.03205365687608719, -0.16439731419086456, -0.04965220019221306, -0.01885596662759781, -0.07898404449224472, 0.032485391944646835, 0.011361796408891678, 0.07260442525148392, -0.05672432854771614, 0.1758008450269699, 0.0011420665541663766, -0.011311166919767857, 0.09833426028490067, -0.017276016995310783, 0.04534140229225159, -0.12485180050134659, 0.1935381293296814, 0.07518994063138962, -0.041152484714984894, 0.15491649508476257, -0.1283561736345291, -0.03528967499732971, -0.14157432317733765, -0.04543393850326538, -0.08798255771398544, 0.05797138810157776, -0.051449161022901535, -0.016939816996455193, -0.14927446842193604, 0.07186442613601685, 0.010641511529684067, 0.017645519226789474, 0.0015096807619556785, -0.11156689375638962, 0.0268742386251688, -0.0014451820170506835, 0.23194976150989532, -0.07549609243869781, -0.13841727375984192, -0.002641977509483695, 0.033430065959692, 0.1115310788154602, -0.163319393992424, 0.03141332417726517, 0.08557138592004776, -0.0009665740071795881, 0.16780202090740204, 0.003980930894613266, -0.06680651754140854, 0.10249710083007812, 0.10503189265727997, 0.014410103671252728, -0.20410890877246857, 0.004180107731372118, 0.12323524057865143, -0.1145016998052597, -0.10570470988750458, 0.0985187217593193, -0.0694439485669136, -0.012483048252761364, 0.010388593189418316, 0.06077128276228905, 0.055780697613954544, 0.08991743624210358, 0.007733091711997986, 0.07096739858388901, -0.02166801318526268, 0.04426417872309685, 0.08464515954256058, -0.1981646865606308, -0.0366169773042202, 0.21307937800884247, -0.06741704791784286, -0.07830995321273804, -0.014418973587453365, -0.0869472473859787, -0.007734717335551977, 0.008998643606901169, 0.02427019737660885, -0.1021583080291748, -0.007588170003145933, 0.12862715125083923, -0.01584087498486042, -0.019624456763267517, 0.034507788717746735, -0.014313275925815105, 0.020979665219783783, 0.04721938446164131, 
-0.009674746543169022, -0.02828284353017807, -0.06275458633899689, -0.1551596224308014, 0.05359369143843651, 0.05874994397163391, -0.03442976251244545, -0.07052500545978546, -0.09565536677837372, 0.05225774645805359, -0.1344500035047531, 0.06140941008925438, -0.10484156757593155, -0.0007835603319108486, 0.006197439040988684, -0.00889976043254137, -0.010450887493789196, 0.023367512971162796, -0.0832107663154602, -0.08458419144153595, -0.015514066442847252, 0.1249774768948555, -0.13578668236732483, 0.01753074675798416, 0.13787424564361572, -0.07011725753545761, 0.07031488418579102, 0.023195400834083557, -0.1491074115037918, 0.035555049777030945, -0.11155547946691513, 0.04257041588425636, -0.005659915506839752, 0.05139092355966568, -0.020392362028360367, -0.0737878680229187, 0.004543194081634283, 0.017219116911292076, -0.00610304344445467, 0.025855962187051773, 0.05286529287695885, -0.10005483776330948, 0.0016998805804178119, -0.0512702614068985, -0.037402112036943436, -0.03608550503849983, -0.017429150640964508, -0.029469966888427734, 0.01891002245247364, 0.1190575584769249, -0.0601569227874279, 0.0702994242310524, -0.04270704835653305, 0.005917928647249937, 0.023752009496092796, 0.07545170187950134, 0.007173688616603613, -0.060185451060533524, 0.059831563383340836, -0.017947323620319366, 0.14140626788139343, -0.0854671448469162, -0.045380182564258575, 0.06557614356279373, 0.053257256746292114, -0.009744847193360329, -0.028479626402258873, 0.02115303836762905, 0.0804196298122406, -0.014215840958058834, 0.05809333547949791, -0.07403705269098282, -0.01602981425821781, 0.022679535672068596, 0.17634795606136322, 0.1934245526790619, 0.08911971002817154, 0.0785536989569664, 0.015884792432188988, -0.05409292131662369, 0.011993341147899628, 0.03504198044538498, -0.000985890394076705, 0.10972602665424347, -0.07123342156410217, 0.03720680624246597, 0.18378540873527527, -0.02774023450911045, 0.05551972985267639, -0.04698460176587105, -0.03458157554268837, 0.010789353400468826, 
-0.08552943915128708, -0.04942014068365097, -0.07759789377450943, 0.021562183275818825, -0.0904504805803299, 0.02931912988424301, 0.03422437235713005, 0.04157144948840141, -0.020997006446123123, 0.19421540200710297, -0.06736777722835541, -0.15842631459236145, 0.09113239496946335, -0.029168561100959778, 0.08960834890604019, 0.07883334904909134, 0.0801696628332138, -0.05457701161503792, 0.036656927317380905, 0.023888202384114265, 0.12698572874069214, -0.02872309647500515, -0.02398899383842945, -0.2037624716758728, -0.05939553305506706, -0.0008292848942801356, 0.0835968554019928, 0.018036382272839546, 0.21608495712280273, 0.03732689097523689, -0.061964135617017746, -0.019994467496871948, 0.2643193006515503, -0.023101918399333954, -0.10678353160619736, -0.09463483095169067, 0.05777500942349434, 0.010984959080815315, -0.11269006878137589, -0.07403846830129623, -0.14606381952762604, 0.0065902830101549625, 0.24319161474704742, 0.1179826632142067, -0.13585536181926727, 0.028686655685305595, -0.10921990126371384, 0.034857697784900665, -0.009605670347809792, 0.06646541506052017, 0.06214124336838722, 0.2736397385597229, -0.046798061579465866, -0.010781340301036835, -0.029309038072824478, -0.05040135607123375, -0.19205166399478912, 0.1359955370426178, -0.049465611577034, 0.00546844769269228, -0.11542048305273056, 0.14261625707149506, -0.09620442986488342, -0.047325633466243744, -0.016169626265764236, -0.008384648710489273, -0.09402072429656982, -0.026431189849972725, 0.0384681336581707, -0.005228808149695396, 0.02767820656299591, 0.03767956420779228, 0.000041724819311639294, -0.012480097822844982, 0.024438930675387383, -0.013162452727556229, -0.02377394773066044, 0.13573656976222992, -0.04526769742369652, 0.13938306272029877, 0.05472617968916893, 0.07145247608423233, 0.04947444051504135, 0.008166681975126266, -0.1042819619178772, 0.10879923403263092, 0.09903194010257721, -0.02811696007847786, 0.013104451820254326, 0.2634052038192749, 0.04922188073396683, 0.06757670640945435, 
0.16238367557525635, -0.01773156225681305, 0.02306576631963253, 0.05091479793190956, 0.05390775203704834, -0.12937112152576447, 0.043382205069065094, -0.10979197174310684, 0.1272023767232895, 0.09954434633255005, -0.05694885179400444, -0.05292797088623047, -0.0244346484541893, -0.035581398755311966, -0.0574895404279232, 0.032094359397888184, -0.06885626167058945, -0.12347325682640076, -0.02185172401368618, 0.02288946323096752, -0.0055770366452634335, -0.2585178315639496, -0.044544268399477005, 0.052478719502687454, 0.06262987107038498, 0.02578437328338623, 0.14658693969249725, 0.07488870620727539, -0.02162640355527401, -0.014302116818726063, -0.3118957579135895, 0.05747620016336441, 0.10931111872196198, -0.06870106607675552, -0.028129834681749344 ]
821307c5ece5265b3df5764abf2655abd9e85435
# Dataset Card for "lmind_nq_v1_qa" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_nq_v1_qa
[ "region:us" ]
2024-01-25T06:22:04+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}, {"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}], "splits": [{"name": "train_qa", "num_bytes": 34574, "num_examples": 300}, {"name": "train_recite_qa", "num_bytes": 222533, "num_examples": 300}, {"name": "eval_qa", "num_bytes": 11254, "num_examples": 100}, {"name": "eval_recite_qa", "num_bytes": 73368, "num_examples": 100}, {"name": "all_docs", "num_bytes": 248990, "num_examples": 392}, {"name": "train", "num_bytes": 34574, "num_examples": 300}, {"name": "validation", "num_bytes": 11254, "num_examples": 100}], "download_size": 0, "dataset_size": 636547}}
2024-01-26T07:39:29+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_nq_v1_qa" More Information needed
[ "# Dataset Card for \"lmind_nq_v1_qa\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_nq_v1_qa\"\n\nMore Information needed" ]
[ 6, 20 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"lmind_nq_v1_qa\"\n\nMore Information needed" ]
[ -0.13993552327156067, 0.13011933863162994, -0.002787754638120532, 0.04922997206449509, 0.02730904333293438, 0.0927998498082161, 0.051039353013038635, 0.11882985383272171, 0.18411633372306824, -0.029461683705449104, 0.1257082223892212, 0.003186528803780675, 0.06286588311195374, 0.129492849111557, 0.0015670587308704853, -0.0474538691341877, 0.035138197243213654, 0.013945207931101322, -0.0968901589512825, 0.03525470942258835, 0.007856469601392746, -0.04077616706490517, 0.06514709442853928, -0.07942566275596619, -0.13258497416973114, 0.06992363929748535, -0.03807358071208, -0.029852040112018585, 0.07794023305177689, -0.024751273915171623, 0.09945545345544815, 0.06468244642019272, 0.0479041188955307, -0.22424441576004028, 0.030724218115210533, -0.03197203949093819, -0.11916420608758926, 0.010205154307186604, -0.0007887229439802468, -0.083133265376091, -0.11602665483951569, 0.0016496782191097736, -0.05088167265057564, 0.02370716817677021, -0.11354303359985352, -0.1952705979347229, -0.09500815719366074, 0.024938445538282394, -0.023347364738583565, -0.030421989038586617, 0.07408691942691803, 0.08320604264736176, -0.13521601259708405, 0.07867774367332458, 0.09839432686567307, -0.18642038106918335, 0.07476066052913666, 0.1306360363960266, -0.061677634716033936, 0.11058250814676285, -0.004911068361252546, 0.06392782926559448, 0.09967245161533356, -0.05405069887638092, -0.06860275566577911, -0.036948323249816895, -0.11133109778165817, 0.14600560069084167, -0.049653101712465286, -0.08071279525756836, 0.2613115608692169, 0.04547257348895073, 0.02460585907101631, 0.11137405037879944, -0.014396675862371922, -0.15471667051315308, 0.020260365679860115, 0.07372049987316132, 0.009748294949531555, -0.0021721532102674246, 0.07091883569955826, 0.013107533566653728, -0.0770578384399414, -0.14780893921852112, -0.173723042011261, 0.01585562899708748, -0.00710220355540514, 0.16229040920734406, -0.17902772128582, -0.012188426218926907, -0.1555195450782776, -0.030601073056459427, 
-0.0556340366601944, -0.10273447632789612, -0.04199153557419777, 0.017550988122820854, 0.014597741886973381, 0.0605417862534523, 0.14632749557495117, -0.006689897272735834, 0.055683452636003494, 0.054283399134874344, -0.051983896642923355, 0.07141284644603729, 0.1341431438922882, -0.012575211934745312, -0.022263558581471443, 0.04237769544124603, 0.006404293701052666, -0.06824525445699692, -0.09059368073940277, -0.07710494846105576, -0.12042004615068436, -0.031867049634456635, -0.037330564111471176, 0.15698489546775818, -0.0480119064450264, -0.057643674314022064, 0.006484034471213818, -0.0179137010127306, 0.09336280822753906, -0.09850046038627625, -0.016256872564554214, 0.04423898458480835, -0.07797042280435562, -0.06017712876200676, -0.0747360959649086, -0.029125187546014786, 0.0480320118367672, -0.08320131152868271, -0.05978340283036232, -0.02852298878133297, 0.006713979411870241, -0.03157542273402214, 0.06206091120839119, -0.0769236758351326, 0.08299960941076279, -0.14243772625923157, -0.182865172624588, 0.0615081787109375, -0.002179098082706332, -0.023256294429302216, 0.1455519050359726, -0.0017809083219617605, 0.04947512969374657, -0.03622288629412651, -0.05452581122517586, 0.09395091235637665, -0.0877622738480568, 0.07577941566705704, 0.026975033804774284, 0.11317441612482071, -0.1840934157371521, 0.03810882568359375, -0.06222391128540039, 0.05935455858707428, 0.06786934286355972, 0.06490407139062881, -0.1519750952720642, 0.015290996991097927, -0.06616874784231186, -0.027194837108254433, -0.05860179290175438, -0.010806065984070301, -0.01106701698154211, 0.05961861088871956, -0.24424920976161957, -0.041746433824300766, 0.15409769117832184, -0.0924091637134552, -0.20524583756923676, -0.0005698263412341475, -0.012547170743346214, -0.034655068069696426, -0.006053943652659655, 0.3603517413139343, 0.17706798017024994, -0.0997827872633934, -0.04679549112915993, 0.18043600022792816, -0.15917904675006866, -0.2840941250324249, 0.058740004897117615, 0.0537431463599205, 
-0.14799119532108307, 0.03508622944355011, 0.10004284232854843, 0.029284877702593803, -0.06598949432373047, -0.14128446578979492, -0.01962156966328621, -0.16714319586753845, 0.05973177030682564, -0.016897790133953094, 0.039903681725263596, 0.0339660719037056, 0.1449311524629593, -0.07668066769838333, 0.07845261693000793, 0.018991494551301003, -0.018501481041312218, -0.05337408557534218, 0.18803615868091583, -0.10490629076957703, 0.026039963588118553, -0.12540337443351746, -0.135987788438797, -0.008886328898370266, -0.08737896382808685, 0.021712539717555046, 0.056813016533851624, 0.06815479695796967, -0.02114148996770382, 0.014065567404031754, 0.012173582799732685, 0.06908467411994934, 0.057917483150959015, 0.010082348249852657, 0.0021192580461502075, 0.029987748712301254, -0.08562455326318741, -0.08714691549539566, 0.0041664461605250835, -0.06530217826366425, 0.07658111304044724, -0.03575346618890762, 0.004596874583512545, 0.04258285090327263, 0.0531155988574028, 0.034183405339717865, 0.04654150456190109, -0.016999157145619392, 0.028360093012452126, -0.07207140326499939, -0.060948219150304794, 0.020471492782235146, -0.0088014742359519, 0.2021840363740921, 0.0963093563914299, 0.0006036683917045593, 0.060834724456071854, -0.12432216852903366, -0.00419350853189826, -0.0048936158418655396, -0.014725831337273121, 0.07374218851327896, -0.06849976629018784, -0.0431411936879158, 0.038366157561540604, -0.07899324595928192, 0.03174055367708206, 0.04966100677847862, 0.005888710729777813, -0.11151738464832306, 0.08645891398191452, 0.16379274427890778, -0.1883218139410019, 0.09375179558992386, 0.2212022840976715, 0.13675981760025024, 0.11665870249271393, -0.07646908611059189, -0.06842834502458572, -0.008661152794957161, -0.0004862298665102571, -0.07664613425731659, 0.2012445479631424, -0.04567229002714157, 0.02049275115132332, 0.07108142971992493, 0.04084765166044235, 0.07859871536493301, -0.10091452300548553, -0.13132689893245697, -0.02376929670572281, -0.06954490393400192, 
-0.29824134707450867, 0.04204441234469414, 0.015081089921295643, 0.08782833069562912, 0.015100155025720596, -0.009552857838571072, 0.09839283674955368, -0.03125777840614319, -0.0543580986559391, 0.14048077166080475, -0.1717778891324997, -0.22328849136829376, -0.003227818990126252, -0.09410392493009567, 0.009959038347005844, -0.04700200632214546, -0.03549269586801529, -0.17968399822711945, -0.01725359819829464, 0.02246764488518238, -0.0750306099653244, -0.13669775426387787, 0.016093261539936066, -0.001789140747860074, 0.009725820273160934, -0.03206421434879303, -0.11207862198352814, 0.02256942167878151, -0.06884806603193283, 0.06445233523845673, 0.12250663340091705, -0.11462705582380295, 0.12760606408119202, 0.10201511532068253, -0.048567287623882294, 0.11017274856567383, -0.04734879359602928, 0.25926917791366577, -0.004989026580005884, -0.04342419654130936, 0.041430238634347916, 0.03274582698941231, 0.02684265375137329, 0.1237424984574318, 0.04302862659096718, -0.12400784343481064, -0.035520922392606735, -0.015743665397167206, -0.18335165083408356, -0.20234690606594086, -0.11188338696956635, -0.03404304012656212, 0.09925375133752823, 0.007562422659248114, 0.0992804765701294, -0.050115957856178284, 0.0363033302128315, 0.15180078148841858, -0.02551589347422123, -0.1825602799654007, -0.07698960602283478, 0.012252066284418106, 0.011130901984870434, -0.03504593297839165, -0.14059822261333466, 0.05263751372694969, 0.13251599669456482, 0.3368362486362457, 0.230290949344635, 0.030293529853224754, 0.0849292129278183, 0.05600022152066231, 0.2586961090564728, 0.13319969177246094, 0.09801950305700302, 0.030901707708835602, -0.03808276355266571, 0.0390026792883873, 0.01767968200147152, -0.049614038318395615, -0.005247841123491526, 0.13956131041049957, -0.12460003048181534, -0.028047828003764153, -0.021199898794293404, 0.022193990647792816, -0.13827753067016602, 0.07172460854053497, -0.07298903167247772, 0.07695456594228745, 0.007776098325848579, 0.13161252439022064, 
-0.04351310059428215, 0.11771377921104431, 0.0585671029984951, -0.03681271895766258, 0.000462585681816563, 0.049653060734272, 0.0660116970539093, -0.06231386959552765, -0.008474391885101795, -0.059988729655742645, -0.12016308307647705, -0.013638866133987904, 0.14101682603359222, -0.21950046718120575, 0.28669172525405884, 0.051788702607154846, -0.05495007708668709, -0.06072256341576576, -0.09613847732543945, -0.04044128581881523, 0.03449732065200806, 0.18635565042495728, 0.049373455345630646, -0.1344827115535736, -0.27955564856529236, -0.061291392892599106, 0.050490785390138626, 0.09781643748283386, 0.11104249209165573, -0.09981458634138107, 0.061626236885786057, 0.00489191897213459, -0.057242218405008316, -0.026904510334134102, -0.05620519444346428, -0.06721960753202438, -0.006058594211935997, 0.034248318523168564, -0.07170869410037994, 0.019009355455636978, 0.007900330238044262, -0.17326000332832336, -0.058312561362981796, -0.03542085364460945, -0.0632854476571083, -0.08747922629117966, -0.03407998010516167, 0.11491429060697556, -0.05866939201951027, -0.012507300823926926, 0.012066883035004139, -0.043469276279211044, -0.03626413270831108, -0.2080816775560379, 0.027450116351246834, -0.060664474964141846, 0.09733986854553223, -0.05415698513388634, 0.10245344042778015, -0.021531252190470695, 0.03859533742070198, 0.009288451634347439, -0.030886581167578697, -0.04217022284865379, -0.10671033710241318, 0.1782512664794922, -0.16556501388549805, -0.004110128153115511, 0.13030685484409332, 0.01629680022597313, 0.09359626471996307, 0.06731536239385605, 0.006942375563085079, 0.2219373732805252, 0.17193040251731873, -0.08984462171792984, 0.21900352835655212, 0.1873420625925064, -0.032242551445961, -0.2307654321193695, -0.04256056994199753, -0.13769525289535522, -0.02621997520327568, 0.03719629347324371, -0.19425159692764282, 0.04701943323016167, 0.11083495616912842, -0.020046673715114594, 0.2260431945323944, -0.16259537637233734, -0.007416645530611277, 0.18266268074512482, 
0.0694911926984787, 0.29024335741996765, -0.09833533316850662, -0.06122899800539017, -0.024446753785014153, -0.16405506432056427, 0.15328435599803925, -0.036364734172821045, 0.05908280238509178, -0.04422348365187645, 0.15685269236564636, -0.006810307037085295, -0.046499717980623245, 0.15943069756031036, 0.10256334394216537, 0.0801660567522049, -0.05581922456622124, -0.11346539109945297, 0.03067687712609768, -0.0654214397072792, 0.013283212669193745, 0.02692052535712719, 0.08653649687767029, -0.1589939147233963, 0.015380379743874073, -0.014144889079034328, -0.01093912310898304, 0.05465537682175636, -0.08008837699890137, -0.07912316173315048, 0.017435498535633087, -0.03760254383087158, -0.03827302157878876, 0.02135360613465309, -0.014044624753296375, -0.011741344816982746, 0.09693286567926407, -0.0007894466398283839, -0.10799113661050797, 0.0001416007726220414, -0.05594579875469208, -0.07486292719841003, 0.09806737303733826, -0.2235262244939804, 0.06956860423088074, 0.15948809683322906, 0.01447505597025156, 0.02945731207728386, 0.05479202792048454, -0.0012990201357752085, 0.06104286015033722, 0.1285143494606018, -0.07732189446687698, -0.10922440141439438, 0.042905282229185104, -0.13938036561012268, 0.038215842097997665, 0.05663353204727173, -0.014011331833899021, 0.04418226704001427, 0.0036570189986377954, -0.03527111932635307, 0.02974875643849373, -0.0651121586561203, 0.06311967223882675, 0.19212964177131653, 0.07830382883548737, -0.18015970289707184, 0.17749980092048645, 0.00016971131844911724, -0.03538711741566658, 0.031349845230579376, -0.008389457128942013, -0.09764877706766129, -0.09587068855762482, -0.0236399844288826, 0.2216070294380188, -0.11808441579341888, -0.10667479783296585, 0.02086646482348442, -0.06267939507961273, 0.04426484927535057, 0.23383623361587524, -0.002832825528457761, 0.06714357435703278, 0.04042349383234978, -0.06137394905090332, 0.0007135348860174417, 0.012479682452976704, -0.1541707068681717, -0.021793462336063385, -0.09345409274101257, 
-0.18165861070156097, -0.08108297735452652, 0.3239557445049286, -0.05164593830704689, -0.09386094659566879, -0.1092812716960907, 0.09395087510347366, -0.2782911956310272, 0.04835643619298935, 0.01887989416718483, 0.0490218885242939, -0.018588948994874954, 0.0012283772230148315, -0.036121562123298645, 0.022757260128855705, -0.12489600479602814, 0.10544586926698685, 0.042012836784124374, 0.030088158324360847, -0.11086155474185944, -0.025632251054048538, 0.03506865352392197, 0.10972537845373154, 0.11642535775899887, 0.08912289887666702, -0.0009799033869057894, 0.15273523330688477, -0.0501255989074707, -0.11408460885286331, 0.02694408781826496, 0.0700581967830658, 0.09168218821287155, 0.04517800733447075, 0.004069320857524872, 0.023709233850240707, -0.11974773555994034, 0.05486699566245079, -0.041575923562049866, -0.040006112307310104, -0.01878664642572403, -0.14474385976791382, -0.024211470037698746, -0.019908063113689423, -0.0364951454102993, 0.14690110087394714, 0.03181411698460579, -0.05434231087565422, 0.058736708015203476, 0.028637750074267387, -0.02541293576359749, -0.07726603001356125, 0.04357017204165459, -0.09545089304447174, -0.08739884197711945, 0.03505931422114372, 0.03833656385540962, -0.08457001298666, 0.40664592385292053, -0.03808306157588959, -0.12124524265527725, -0.04254532605409622, 0.09732438623905182, -0.08085832744836807, -0.030677659437060356, 0.31617581844329834, 0.08908747136592865, -0.02018970251083374, 0.00041984915151260793, 0.07650838047266006, -0.045942582190036774, 0.21424150466918945, 0.11980699002742767, 0.021045085042715073, 0.11575296521186829, -0.022266138345003128, 0.08436621725559235, -0.09760632365942001, 0.050019554793834686, -0.04103604704141617, -0.04095352441072464, -0.004994952119886875, 0.03930960223078728, -0.10570823401212692, 0.055282726883888245, -0.018382525071501732, 0.06125429645180702, -0.029323505237698555, -0.12416207045316696, -0.11295349150896072, -0.13974781334400177, -0.041249293833971024, 
-0.09247490763664246, 0.05182035267353058, -0.07533160597085953, -0.018202437087893486, 0.16313536465168, 0.07166073471307755, 0.04686298593878746, 0.0554315410554409, -0.028533464297652245, 0.07444559782743454, -0.014835108071565628, -0.0106468815356493, -0.04839586839079857, 0.008092643693089485, 0.006549849174916744, 0.08749362826347351, -0.10446658730506897, -0.055797651410102844, 0.010871389880776405, 0.03582322597503662, 0.025934938341379166, -0.17141588032245636, -0.042083024978637695, -0.08475194126367569, 0.08781803399324417, -0.06583284586668015, 0.11540046334266663, 0.04039181023836136, 0.0267042089253664, 0.04271441325545311, 0.10167255252599716, 0.07173582911491394, 0.0033884267322719097, -0.03153035789728165, -0.008813065476715565, -0.07740350067615509, 0.07645494490861893, 0.005833192262798548, -0.049178555607795715, -0.02936156652867794, 0.23353658616542816, 0.235983744263649, -0.09683030098676682, 0.013067408464848995, 0.004907764028757811, 0.0477026142179966, 0.04076850414276123, 0.16953741014003754, 0.05152306705713272, 0.11806941032409668, -0.06131551042199135, -0.10580343753099442, -0.03065147064626217, -0.03343937173485756, -0.013402738608419895, 0.1449432075023651, 0.057384781539440155, -0.04043181613087654, -0.13920576870441437, 0.0961855798959732, -0.23427711427211761, 0.15413038432598114, 0.06724061816930771, -0.06511016935110092, -0.094882532954216, -0.048118844628334045, 0.06639751046895981, 0.04076090082526207, -0.07060030102729797, -0.09666470438241959, 0.004420592449605465, -0.01239194255322218, -0.014935147948563099, -0.30787646770477295, -0.16298598051071167, 0.021775467321276665, 0.1643676608800888, 0.08077776432037354, 0.010431389324367046, 0.09780079871416092, -0.00245724618434906, 0.05876211076974869, -0.09423220902681351, 0.11187379062175751, 0.04803410544991493, -0.012926501221954823, -0.10224273800849915, 0.021939361467957497, -0.018791528418660164, -0.0368085615336895, 0.059311047196388245, 0.04772492125630379, 
-0.032453496009111404, 0.07660754024982452, 0.011907865293323994, -0.01939293183386326, 0.0017549882177263498, -0.036485109478235245, 0.12651683390140533, 0.005346107296645641, -0.007565510459244251, 0.012503805570304394, -0.029735924676060677, 0.06167979538440704, 0.011929499916732311, -0.09569695591926575, -0.06387341767549515, 0.056822244077920914, -0.05585234612226486, 0.08214697986841202, -0.031961146742105484, -0.07703476399183273, 0.015099598094820976, -0.09420251846313477, 0.04515291750431061, 0.021089375019073486, 0.11226609349250793, 0.1749109923839569, 0.08958756178617477, -0.029948445037007332, 0.007296151947230101, 0.054871782660484314, -0.002344694687053561, -0.05501210689544678, -0.11578591912984848 ]
e1b564e7c3923538d92f3e5e9c699790a1d7f2ed
If the dataseet is too big to dowload, you can check the image or preview_dataset
masya420/KSL
[ "region:us" ]
2024-01-25T06:30:19+00:00
{}
2024-01-25T06:43:43+00:00
[]
[]
TAGS #region-us
If the dataseet is too big to dowload, you can check the image or preview_dataset
[]
[ "TAGS\n#region-us \n" ]
[ 6 ]
[ "passage: TAGS\n#region-us \n" ]
[ 0.024608636274933815, -0.026205500587821007, -0.009666500613093376, -0.10395516455173492, 0.08638657629489899, 0.059816278517246246, 0.01882290467619896, 0.020661840215325356, 0.23975107073783875, -0.005599027033895254, 0.1219947561621666, 0.0015615287702530622, -0.037353623658418655, 0.03733762726187706, -0.0035912662278860807, -0.17583473026752472, 0.03876631706953049, -0.018274923786520958, 0.01843859627842903, 0.026470553129911423, -0.07776834815740585, -0.07564429938793182, 0.015296397730708122, -0.10247814655303955, -0.083692267537117, 0.11002834886312485, 0.031466204673051834, -0.019670886918902397, 0.10779199749231339, -0.04243955761194229, 0.18699054419994354, -0.011512263678014278, -0.11213519424200058, -0.2536850869655609, 0.021806683391332626, -0.01765260472893715, -0.08747660368680954, 0.01506110467016697, 0.0665089413523674, -0.09014441072940826, -0.0588928684592247, 0.0795099288225174, -0.01132340170443058, 0.04246443510055542, -0.27593839168548584, -0.12684126198291779, -0.05297930911183357, -0.1421966552734375, 0.08651168644428253, 0.04035491496324539, 0.008764253929257393, 0.15506891906261444, -0.20897391438484192, 0.004104613792151213, 0.08255259692668915, -0.2538507878780365, 0.05591634660959244, 0.17671173810958862, 0.03623908758163452, 0.18037272989749908, 0.0060391901060938835, 0.11029672622680664, 0.0716743916273117, -0.024263937026262283, -0.17590197920799255, -0.08127854019403458, -0.04696211963891983, 0.16642488539218903, -0.06727185100317001, -0.14248386025428772, 0.34701237082481384, 0.00015008423360995948, 0.009657775051891804, 0.16921205818653107, -0.059524230659008026, -0.09972117841243744, 0.07259953022003174, 0.016484731808304787, 0.018492350354790688, 0.1471305936574936, 0.16307872533798218, -0.0458691343665123, -0.13837823271751404, -0.018630273640155792, -0.22798998653888702, 0.17510560154914856, -0.03248048573732376, 0.13137903809547424, -0.27447956800460815, 0.01684025302529335, -0.2570667266845703, 0.0032130838371813297, 
0.04178816080093384, -0.06004921346902847, -0.0226522795855999, -0.013265985064208508, -0.08018817007541656, 0.004899587947875261, 0.06192673370242119, 0.1266920566558838, -0.06128726154565811, 0.06128238886594772, -0.09319206327199936, 0.141696035861969, 0.07166698575019836, 0.07868369668722153, 0.13037432730197906, 0.041205424815416336, -0.07187089323997498, -0.21872246265411377, -0.0026476888451725245, -0.06275863200426102, -0.09502086788415909, -0.0020165652967989445, -0.11606067419052124, 0.17244569957256317, -0.030802514404058456, -0.09825427830219269, -0.11208184063434601, 0.09148659557104111, -0.032992321997880936, -0.03437839448451996, -0.03552987426519394, -0.020977836102247238, 0.019381176680326462, 0.04704452306032181, -0.1548958420753479, -0.005131472367793322, 0.07039852440357208, 0.11502562463283539, -0.1346137970685959, -0.003783059772104025, -0.07908964157104492, 0.03039063885807991, 0.07654735445976257, -0.16510222852230072, 0.03158547356724739, -0.1124754324555397, -0.07531405985355377, 0.002912673633545637, -0.015710093080997467, -0.016202643513679504, 0.166526660323143, -0.0020451415330171585, 0.0714716836810112, -0.026345307007431984, -0.05890209600329399, -0.11243434250354767, -0.08489254862070084, 0.05390460044145584, 0.03670717030763626, 0.03266148269176483, -0.2193479984998703, 0.014805203303694725, -0.12762966752052307, 0.1360815018415451, -0.10566820204257965, -0.04705966264009476, -0.022842247039079666, 0.20562705397605896, 0.037286072969436646, 0.08762791007757187, -0.22171171009540558, 0.039756543934345245, -0.05404696613550186, 0.18480908870697021, -0.1502426266670227, -0.0799463614821434, 0.20813211798667908, -0.07964949309825897, -0.10115210711956024, 0.021235812455415726, 0.020391687750816345, 0.026287272572517395, 0.0766737088561058, 0.4564172327518463, -0.09766800701618195, -0.09146861732006073, 0.10178250074386597, 0.17055274546146393, -0.12427149713039398, -0.1827561855316162, 0.06446871906518936, -0.16666454076766968, 
-0.1973118633031845, 0.0018917324487119913, 0.09222044050693512, 0.038269978016614914, -0.07875611633062363, -0.020746968686580658, 0.06325206160545349, -0.0007678253459744155, 0.09095914661884308, 0.03755716234445572, 0.09034032374620438, -0.08716782182455063, 0.11115926504135132, -0.05017651244997978, 0.004037132486701012, 0.1343354731798172, 0.027325427159667015, -0.03223329409956932, 0.08694463223218918, -0.0485352948307991, 0.05295134335756302, -0.1662379503250122, -0.15068690478801727, 0.03398871049284935, 0.06283251196146011, 0.03186952322721481, 0.1280253529548645, 0.08141885697841644, -0.10732853412628174, 0.022690722718834877, -0.004228927195072174, 0.058398615568876266, 0.03891623765230179, 0.006107209715992212, 0.008764320984482765, 0.0961301177740097, -0.10607069730758667, -0.13589619100093842, -0.07336436957120895, -0.014715781435370445, 0.14371353387832642, -0.0302802175283432, 0.07690227776765823, -0.004240254405885935, 0.00013200697139836848, 0.06930823624134064, 0.08137880265712738, 0.016412746161222458, 0.08971183747053146, -0.05237193778157234, -0.05160155147314072, 0.10863113403320312, -0.13533565402030945, 0.17837053537368774, 0.14053137600421906, -0.20532016456127167, 0.029453208670020103, -0.06838275492191315, 0.03670361638069153, -0.008162540383636951, 0.0975119024515152, -0.08272241055965424, -0.02106042578816414, 0.013134466484189034, 0.0052274600602686405, -0.013007243163883686, 0.017682146281003952, -0.07295988500118256, -0.07787393033504486, -0.10233919322490692, 0.08436838537454605, 0.11562882363796234, -0.10282530635595322, 0.14214380085468292, 0.4384984076023102, 0.11495281755924225, 0.21582984924316406, -0.09581480920314789, -0.0412987545132637, 0.007486371789127588, 0.0001535322517156601, -0.04476691037416458, 0.08031861484050751, -0.15973517298698425, -0.038901735097169876, 0.027348900213837624, 0.07128690183162689, 0.11475157737731934, -0.14959022402763367, -0.09639324247837067, -0.00793045200407505, 0.0022841424215584993, 
-0.1249532699584961, 0.023905446752905846, -0.03974650055170059, 0.04015624523162842, 0.07232289016246796, -0.021535737439990044, 0.13939237594604492, -0.04166141897439957, -0.0639561116695404, 0.07585346698760986, -0.2017085999250412, -0.23179671168327332, -0.12309670448303223, -0.14680525660514832, 0.04366797208786011, 0.05154111236333847, 0.01726446859538555, -0.17635835707187653, -0.015074856579303741, 0.07706750929355621, 0.07820965349674225, -0.20886357128620148, -0.022814949974417686, -0.004290030337870121, 0.0895976573228836, -0.10227091610431671, -0.0017130117630586028, -0.04419664293527603, -0.10150232166051865, 0.0017003051470965147, 0.07279510796070099, -0.137485533952713, 0.13807645440101624, 0.21589438617229462, 0.07225540280342102, 0.07359948754310608, -0.019093448296189308, 0.09936179965734482, -0.10856141895055771, -0.16549113392829895, 0.08348225057125092, -0.06234746053814888, 0.047262318432331085, 0.17534415423870087, 0.03307317942380905, -0.13904969394207, -0.015682822093367577, -0.0402069091796875, -0.15603256225585938, -0.238995760679245, -0.09178274869918823, -0.1182505264878273, 0.16442428529262543, 0.0009358620154671371, 0.06651917099952698, 0.08258313685655594, -0.022042419761419296, 0.16447891294956207, -0.07379321753978729, -0.07578866183757782, -0.006978808436542749, 0.12375060468912125, -0.056660156697034836, -0.03080669604241848, -0.10566964000463486, -0.008295975625514984, 0.1151021271944046, 0.15304014086723328, 0.12214863300323486, 0.2957419455051422, 0.08268889784812927, 0.026645636186003685, 0.08958091586828232, 0.17622539401054382, 0.09495089203119278, 0.07838419824838638, -0.045413073152303696, -0.014814783819019794, 0.014317171648144722, -0.04022889584302902, 0.010141594335436821, 0.14683100581169128, -0.2679629921913147, -0.006678564939647913, -0.2710230350494385, 0.0965198427438736, -0.10913380235433578, 0.11837165057659149, -0.01015760749578476, 0.10194015502929688, 0.11082887649536133, 0.03233652561903, 
-0.03858073800802231, 0.16613617539405823, 0.08450309932231903, -0.11277695000171661, 0.001758623169735074, 0.03737903758883476, 0.09715615212917328, -0.02818971499800682, 0.12721189856529236, -0.11048974841833115, -0.1464834064245224, 0.013753619976341724, 0.07152791321277618, -0.15373679995536804, 0.3138748109340668, 0.012069208547472954, -0.13481520116329193, -0.01481647603213787, -0.09957809001207352, -0.006440147757530212, 0.1254177987575531, 0.09333524852991104, 0.07935678958892822, -0.2185502052307129, -0.13339371979236603, 0.05872276425361633, -0.00575496768578887, 0.22408108413219452, -0.034034017473459244, -0.11356475204229355, -0.027013886719942093, 0.04241163283586502, -0.06043251231312752, 0.08524788916110992, 0.023536119610071182, -0.08113526552915573, -0.032957352697849274, 0.05323701351881027, 0.012368366122245789, 0.00524376705288887, 0.09360801428556442, 0.020107939839363098, -0.0009265501867048442, 0.01785753294825554, 0.047885000705718994, -0.0675911232829094, -0.1984109878540039, 0.09357594698667526, -0.05215044692158699, 0.0015536568826064467, -0.08013670891523361, -0.15122665464878082, -0.08837161958217621, -0.16009655594825745, 0.12540200352668762, -0.034406669437885284, 0.12700119614601135, -0.06619787961244583, 0.17341409623622894, -0.07871770113706589, 0.04481020197272301, -0.047349292784929276, 0.050332702696323395, -0.007268077693879604, -0.07756082713603973, 0.16585899889469147, -0.15564003586769104, 0.01809087023139, 0.19572502374649048, -0.018915493041276932, 0.07177707552909851, 0.021322092041373253, -0.0636206790804863, 0.23147478699684143, 0.3014698624610901, 0.008138049393892288, 0.1665448248386383, 0.3018903136253357, -0.07466315478086472, -0.2642788887023926, -0.05505012720823288, -0.2841376066207886, -0.05371501296758652, 0.10716094076633453, -0.22523896396160126, 0.06986407935619354, 0.14383509755134583, -0.06471995264291763, 0.30228954553604126, -0.21825523674488068, 0.012589273042976856, 0.15434536337852478, 
-0.08868814259767532, 0.5515313148498535, -0.1133413165807724, -0.17677772045135498, -0.008122089318931103, -0.08741296827793121, 0.10602109134197235, -0.0340677872300148, 0.06877441704273224, 0.013465235009789467, 0.04797380417585373, 0.048932258039712906, -0.03111894056200981, 0.22701001167297363, 0.008710170164704323, 0.09015397727489471, -0.07378865778446198, -0.18624304234981537, 0.11639340221881866, -0.04359482601284981, -0.08891059458255768, 0.0849778801202774, -0.05942516401410103, -0.11078983545303345, 0.04663389176130295, -0.07950539886951447, -0.024862350896000862, 0.08423490077257156, -0.04678233340382576, -0.042606171220541, -0.008054176345467567, -0.1618063747882843, -0.0002289071271661669, 0.31360217928886414, -0.07096036523580551, 0.16695955395698547, 0.03677211329340935, 0.00038613268407061696, -0.11027684062719345, 0.030288029462099075, -0.05203165486454964, -0.021576624363660812, 0.09578979015350342, -0.11096979677677155, 0.03204701095819473, 0.14160704612731934, -0.04864364117383957, 0.05846960097551346, 0.09256096184253693, -0.0849417969584465, 0.007583672646433115, 0.17753590643405914, -0.17537221312522888, -0.1273445188999176, -0.006135711446404457, -0.09862716495990753, 0.14055661857128143, 0.04394126310944557, 0.05191568285226822, 0.16669964790344238, 0.03967129811644554, -0.029474308714270592, -0.02817419543862343, -0.1153380498290062, -0.0201893113553524, 0.040153320878744125, 0.00045633706031367183, -0.08791285753250122, 0.2262638509273529, 0.06409153342247009, -0.1328488290309906, -0.051157206296920776, 0.2161225974559784, -0.06805316358804703, -0.04911920800805092, -0.223562553524971, 0.10752306133508682, -0.07112517952919006, -0.0965060144662857, 0.05453834682703018, -0.02270081453025341, 0.005106312222778797, 0.181985542178154, 0.03941008821129799, 0.11070270836353302, 0.03738937899470329, -0.02448922023177147, 0.15798696875572205, -0.142850860953331, -0.14191335439682007, -0.025354057550430298, -0.08757315576076508, 
-0.13844476640224457, -0.026804137974977493, 0.1617041826248169, -0.09177309274673462, -0.14772607386112213, -0.2621181011199951, 0.10968475043773651, -0.16432365775108337, -0.10192688554525375, -0.03469514101743698, -0.08968492597341537, 0.0696166530251503, 0.030301768332719803, -0.03093348816037178, -0.06706760823726654, -0.18593791127204895, 0.0816768929362297, 0.06349513679742813, 0.045533183962106705, -0.017847947776317596, 0.0067379772663116455, 0.1720137596130371, 0.025955144315958023, 0.10040043294429779, 0.16762186586856842, 0.011397695168852806, 0.2246655523777008, -0.1671202927827835, -0.11496317386627197, 0.1336962729692459, -0.026543032377958298, 0.06762003898620605, 0.16792191565036774, -0.0772583931684494, 0.015526676550507545, -0.028136352077126503, 0.07066910713911057, -0.11003983020782471, -0.105624258518219, 0.007937257178127766, 0.02567129209637642, -0.2755882740020752, -0.005599735304713249, -0.19717298448085785, 0.14788752794265747, 0.02579621411859989, 0.03297143429517746, 0.10257530212402344, 0.10404334217309952, 0.08312062919139862, -0.0017710148822516203, 0.03226327523589134, -0.1176818460226059, 0.02753005363047123, -0.059239376336336136, -0.020663779228925705, 0.017624232918024063, 0.36952024698257446, -0.03603357449173927, -0.046802736818790436, 0.003710439894348383, 0.1307835876941681, -0.02139742486178875, 0.017395347356796265, 0.13209912180900574, 0.12607666850090027, -0.08595693111419678, -0.1504845917224884, 0.04888554662466049, -0.04565655067563057, -0.02836887165904045, 0.1464131623506546, 0.05905961990356445, 0.1050296202301979, 0.0908031314611435, -0.014463032595813274, -0.00318976235575974, 0.012856799177825451, -0.15486004948616028, 0.06223496049642563, -0.010558074340224266, 0.012565906159579754, 0.017934376373887062, 0.15238402783870697, -0.005540105979889631, 0.07739730179309845, -0.09889880567789078, 0.004208535887300968, -0.13498884439468384, -0.07913459837436676, 0.03617347031831741, -0.13393273949623108, 
0.04141177982091904, -0.01871878281235695, 0.029611799865961075, 0.30386561155319214, 0.02558239921927452, -0.020639164373278618, 0.12512871623039246, -0.1214587539434433, -0.12050267308950424, -0.001594188273884356, -0.029960084706544876, 0.0791488066315651, -0.02633434161543846, -0.0997740775346756, -0.1001306027173996, -0.15166029334068298, -0.09759195148944855, 0.05182836204767227, -0.04993441700935364, -0.059362251311540604, -0.17634081840515137, -0.05707859992980957, -0.05147340148687363, 0.14025864005088806, -0.12263951450586319, 0.15159130096435547, -0.014490418136119843, 0.004084470681846142, 0.04405883327126503, 0.1950942426919937, -0.03644494712352753, 0.08714226633310318, 0.0154351145029068, 0.1522706001996994, -0.05119588226079941, 0.14720745384693146, -0.10931728035211563, -0.04014137014746666, -0.06710435450077057, 0.21513493359088898, 0.25630924105644226, -0.06136954948306084, -0.008937356993556023, -0.012760217301547527, 0.058654606342315674, 0.1073930487036705, 0.16049085557460785, 0.002326392102986574, 0.2802925705909729, -0.03133585304021835, 0.04815128445625305, 0.02901598811149597, 0.013607407920062542, -0.06336209923028946, 0.03397751972079277, 0.07539387792348862, -0.035039983689785004, -0.1412304788827896, 0.15837742388248444, -0.21980468928813934, 0.18157227337360382, 0.11640069633722305, -0.19996967911720276, -0.013728445395827293, -0.04882071167230606, 0.1689416468143463, -0.0856364443898201, 0.1637246012687683, -0.0903693437576294, -0.2108195722103119, -0.2056000679731369, 0.03867346793413162, -0.34623071551322937, -0.254462867975235, 0.10422009229660034, 0.1488201916217804, 0.04015883058309555, -0.018507536500692368, -0.019967829808592796, -0.018367022275924683, 0.04877542704343796, -0.0067357709631323814, 0.06014643982052803, 0.031397558748722076, -0.02988368645310402, -0.24127542972564697, -0.029804671183228493, 0.023964406922459602, -0.07093082368373871, 0.07464958727359772, -0.06874357163906097, -0.022495782002806664, 
0.08059766888618469, -0.03066304884850979, 0.03298592567443848, -0.035373736172914505, -0.16326889395713806, 0.027529051527380943, 0.03900543600320816, 0.036012712866067886, 0.00634160777553916, 0.0008072225609794259, -0.03455270454287529, 0.0644603744149208, -0.16716794669628143, -0.16015739738941193, 0.14140215516090393, -0.06745140254497528, 0.2779497504234314, -0.05812826007604599, -0.0809100940823555, 0.04766704887151718, -0.03426874056458473, 0.1807648241519928, -0.07756473124027252, 0.047254521399736404, 0.12766779959201813, 0.011127962730824947, 0.03121316432952881, -0.3092964291572571, 0.11082969605922699, -0.000795336440205574, -0.006093299947679043, -0.07581598311662674 ]
ace1295f7e2c44c7387a03d538220888cce18436
Raw dataset for "Fact-Aware Fake-news Classification for Indonesian Language"</br></br> <b>Disclaimer:</b> Beta version, contains imbalanced representation of domain-specific NON-HOAX samples. We will release a new training and evaluation suite soon as a replacement of this dataset. </br></br> Data originates from https://turnbackhoax.id/ (Mafindo data 2018-2023); </br> The attributes of data are: </br> 1. Label_id: Binary class labels ("HOAX"==1 ; "NON-HOAX"==0).</br> 2. Label: Binary class labels ("HOAX" or "NON-HOAX").</br> 3. Title: Claim or headline of news article.</br> 4. Title_cleaned: Preprocessed claim, by removing tag label at the beginning of the sentence.</br> 5. Content: the content of news article. </br> 6. Fact: The summary of factual evidence that is either supporting or contradicting the correponding claim.</br> 7. References: URL link of news article and the corresponding verdict or factual evidence as the justification of the news article.</br> 8. Classification: Fine-grained classification labels for the news article:</br> 'CekFakta', 'Fabricated Content', 'False Connection', 'False Context', 'Impostor Content', </br> 'Manipulated Content', 'Misleading Content', 'Satire', 'nan'.</br></br> Example of usage:</br> ```python >>> from datasets import load_dataset >>> train_dataset = load_dataset( ... "nlp-brin-id/fakenews-id-brin", ... split="train", ... keep_default_na=False, ... ).select_columns(['Label_id', 'Title_cleaned', 'Content', 'Fact']) ```
nlp-brin-id/fakenews-id-brin
[ "task_categories:text-classification", "size_categories:10K<n<100K", "language:id", "license:apache-2.0", "region:us" ]
2024-01-25T06:48:11+00:00
{"language": ["id"], "license": "apache-2.0", "size_categories": ["10K<n<100K"], "task_categories": ["text-classification"]}
2024-02-15T09:53:20+00:00
[]
[ "id" ]
TAGS #task_categories-text-classification #size_categories-10K<n<100K #language-Indonesian #license-apache-2.0 #region-us
Raw dataset for "Fact-Aware Fake-news Classification for Indonesian Language"</br></br> <b>Disclaimer:</b> Beta version, contains imbalanced representation of domain-specific NON-HOAX samples. We will release a new training and evaluation suite soon as a replacement of this dataset. </br></br> Data originates from URL (Mafindo data 2018-2023); </br> The attributes of data are: </br> 1. Label_id: Binary class labels ("HOAX"==1 ; "NON-HOAX"==0).</br> 2. Label: Binary class labels ("HOAX" or "NON-HOAX").</br> 3. Title: Claim or headline of news article.</br> 4. Title_cleaned: Preprocessed claim, by removing tag label at the beginning of the sentence.</br> 5. Content: the content of news article. </br> 6. Fact: The summary of factual evidence that is either supporting or contradicting the correponding claim.</br> 7. References: URL link of news article and the corresponding verdict or factual evidence as the justification of the news article.</br> 8. Classification: Fine-grained classification labels for the news article:</br> 'CekFakta', 'Fabricated Content', 'False Connection', 'False Context', 'Impostor Content', </br> 'Manipulated Content', 'Misleading Content', 'Satire', 'nan'.</br></br> Example of usage:</br>
[]
[ "TAGS\n#task_categories-text-classification #size_categories-10K<n<100K #language-Indonesian #license-apache-2.0 #region-us \n" ]
[ 42 ]
[ "passage: TAGS\n#task_categories-text-classification #size_categories-10K<n<100K #language-Indonesian #license-apache-2.0 #region-us \n" ]
[ -0.0038698099087923765, -0.024598995223641396, -0.0026081723626703024, 0.0162563044577837, 0.04456763342022896, 0.04332485795021057, 0.1569293737411499, 0.13245467841625214, 0.08598603308200836, -0.055698737502098083, 0.048896633088588715, -0.015988793224096298, 0.11644268780946732, 0.06750107556581497, -0.042538005858659744, -0.3627699017524719, 0.10864037275314331, -0.046017080545425415, 0.12486948817968369, 0.09479369223117828, 0.09354744851589203, -0.0026389192789793015, 0.032577481120824814, -0.05199596285820007, 0.013212224468588829, -0.040934398770332336, -0.05801188945770264, -0.13743607699871063, 0.018567508086562157, -0.05123928189277649, 0.08753206580877304, 0.010692640207707882, -0.025753678753972054, -0.18557140231132507, -0.018358523026108742, -0.05673676356673241, -0.01259388867765665, -0.009327978827059269, 0.007091987878084183, 0.08091564476490021, 0.09031080454587936, -0.09408161789178848, -0.06534577161073685, 0.0339755043387413, -0.08517792075872421, -0.1859893649816513, -0.10268325358629227, 0.10998012870550156, 0.12048594653606415, 0.046850044280290604, -0.01999947614967823, 0.09549355506896973, -0.19376051425933838, -0.04373624920845032, 0.08019037544727325, -0.2596215009689331, -0.0017021432286128402, 0.16482160985469818, 0.06301840394735336, 0.11886845529079437, -0.051188815385103226, -0.015474120154976845, 0.06363929808139801, -0.02284196950495243, -0.04761297255754471, -0.13801249861717224, -0.1685236394405365, -0.05336799845099449, 0.02212674170732498, -0.00521800946444273, 0.31315839290618896, 0.10914706438779831, -0.0009912621462717652, -0.10379128903150558, 0.0031716045923531055, -0.018481941893696785, -0.0769508108496666, 0.0889139175415039, 0.028389479964971542, 0.0749305710196495, 0.2661820352077484, 0.005440995562821627, -0.1036093458533287, -0.049634870141744614, -0.13217201828956604, 0.024455523118376732, 0.001857947907410562, 0.025583557784557343, -0.04262493550777435, -0.04759423807263374, -0.16543345153331757, 
-0.1375914067029953, -0.02510482259094715, -0.056801654398441315, -0.03679702430963516, 0.09152290970087051, 0.07368594408035278, 0.05605298653244972, 0.17440395057201385, 0.06812112033367157, 0.03745231404900551, 0.046436164528131485, 0.021716421470046043, 0.138591006398201, -0.0017212642123922706, 0.1223660558462143, -0.04347219690680504, -0.06561166048049927, 0.028879782184958458, -0.06241727992892265, 0.12975670397281647, -0.04175996035337448, -0.15033483505249023, -0.07967442274093628, -0.05739651992917061, 0.1225588247179985, -0.03197813406586647, 0.04260192811489105, -0.008121040649712086, -0.01613461598753929, 0.17768506705760956, -0.07222189754247665, -0.059395793825387955, -0.025016173720359802, -0.022483767941594124, 0.07522225379943848, -0.10625505447387695, 0.020499035716056824, -0.019752364605665207, 0.09068424999713898, 0.003863915102556348, 0.008051159791648388, -0.01912778429687023, -0.01509514544159174, 0.09763310104608536, -0.11503582447767258, -0.0029999911785125732, -0.0739152655005455, -0.2030620574951172, -0.0003939141461160034, 0.02712610550224781, -0.058835629373788834, -0.006164488382637501, 0.027662906795740128, -0.026185421273112297, 0.0230938121676445, -0.07866674661636353, 0.0394064262509346, -0.0906853973865509, 0.05813025310635567, -0.04513125866651535, 0.10265763849020004, -0.1608416885137558, -0.009383476339280605, -0.09276615083217621, 0.1049065813422203, -0.17581991851329803, -0.0171294454485178, -0.13416001200675964, 0.2246066778898239, -0.08456979691982269, -0.03437505662441254, -0.049804579466581345, -0.035105835646390915, -0.14105014503002167, 0.06203516200184822, -0.19590988755226135, 0.004963102750480175, 0.11418464779853821, -0.07578630745410919, -0.15082170069217682, 0.052436698228120804, 0.06817737221717834, 0.1443193107843399, 0.010590513236820698, 0.3522612154483795, -0.0022496082819998264, 0.15342019498348236, 0.047245901077985764, 0.17024295032024384, -0.020159604027867317, -0.16908515989780426, 0.14380207657814026, 
-0.06311097741127014, -0.001051047584041953, 0.06414671242237091, 0.018823323771357536, 0.02562609501183033, 0.08556705713272095, -0.09450053423643112, 0.003501137951388955, -0.06203067675232887, 0.059973858296871185, -0.006508249323815107, 0.12786532938480377, -0.09420546144247055, 0.032366104423999786, -0.06328946352005005, 0.08105938136577606, 0.06533150374889374, -0.02606135793030262, -0.009040889330208302, -0.014993978664278984, 0.011197766289114952, 0.057276953011751175, -0.08141766488552094, 0.07486549764871597, 0.05838305503129959, -0.04196277633309364, 0.06264430284500122, 0.05554511770606041, 0.0034854202531278133, -0.14998888969421387, -0.0673752948641777, 0.05897305905818939, 0.03264680877327919, 0.02145407721400261, 0.019244801253080368, -0.024428782984614372, 0.057575903832912445, 0.047083400189876556, 0.08957541733980179, -0.028664851561188698, 0.016060836613178253, 0.019140813499689102, -0.038261719048023224, -0.039226680994033813, 0.12634874880313873, 0.07722727954387665, 0.0105005893856287, -0.0038785238284617662, -0.03411533311009407, 0.04405408352613449, -0.016530562192201614, -0.12340927124023438, 0.09474188089370728, -0.041184861212968826, 0.09226714074611664, 0.16953356564044952, 0.024793967604637146, 0.05837881192564964, 0.015439136885106564, 0.04476345703005791, -0.03210144862532616, -0.05221860483288765, 0.029190316796302795, -0.06436469405889511, 0.03492988646030426, 0.07961943000555038, -0.08423472940921783, -0.04176276922225952, -0.01614479161798954, -0.0668661817908287, 0.05439302697777748, 0.03457716107368469, 0.10164690017700195, -0.2183951735496521, 0.17171691358089447, 0.24290885031223297, -0.00916069746017456, 0.09526091814041138, -0.07119643688201904, 0.013061506673693657, 0.002437771065160632, 0.0490909181535244, -0.04397733137011528, 0.07570259273052216, -0.2202412635087967, 0.00794614851474762, 0.12951701879501343, 0.11002081632614136, 0.01682353764772415, -0.09574995934963226, -0.11837448924779892, -0.06946169584989548, 
-0.06113733351230621, -0.1215309351682663, 0.008337398990988731, 0.0009829978225752711, 0.054695822298526764, -0.0023071274627000093, -0.03815922513604164, 0.05672406032681465, -0.03561568632721901, -0.0298684760928154, 0.12140641361474991, -0.08173645287752151, -0.309322327375412, -0.09644188731908798, 0.07104764133691788, -0.05173710733652115, -0.012808392755687237, 0.07002989202737808, -0.12047399580478668, -0.04823412001132965, -0.022980041801929474, -0.0669868141412735, -0.015381045639514923, -0.023267803713679314, -0.03706372156739235, 0.11162615567445755, -0.07607521116733551, -0.06552066653966904, -0.03447221592068672, 0.032524820417165756, 0.09056388586759567, 0.11164513975381851, -0.16410675644874573, 0.059813328087329865, 0.23336288332939148, -0.0021874981466680765, 0.016213176771998405, -0.024559857323765755, 0.05968624725937843, -0.14383351802825928, 0.08207100629806519, 0.16543813049793243, 0.028187846764922142, -0.014184844680130482, 0.22604772448539734, 0.03138446807861328, -0.06628010421991348, 0.026767101138830185, 0.016940871253609657, -0.07583919167518616, -0.29715874791145325, -0.0418921634554863, -0.12505775690078735, 0.09422805905342102, -0.011888938955962658, 0.049806147813797, 0.1467582881450653, 0.08679851144552231, -0.011489254422485828, 0.12996947765350342, 0.013298260979354382, 0.008443779312074184, 0.15269412100315094, 0.00948009081184864, 0.042552508413791656, -0.16169501841068268, -0.01394391804933548, 0.11008694767951965, 0.11778818815946579, 0.2000371217727661, 0.2421254962682724, 0.22252129018306732, 0.0759207084774971, 0.11924789100885391, 0.08996826410293579, 0.08869152516126633, -0.014509662054479122, 0.008656741119921207, -0.07976442575454712, -0.050319455564022064, 0.04193616658449173, 0.029169689863920212, -0.0960901752114296, -0.09235265105962753, 0.030127281323075294, -0.1265295147895813, 0.13236553966999054, 0.12669534981250763, 0.06425819545984268, 0.012174580246210098, 0.05186968296766281, 0.12298719584941864, 
-0.012852606363594532, -0.03747265413403511, 0.08481507748365402, 0.06055545434355736, -0.11651279777288437, 0.1387712061405182, 0.046769365668296814, 0.15726247429847717, -0.06775136291980743, 0.02403886988759041, -0.12332070618867874, -0.1806805580854416, 0.05079610273241997, 0.21233724057674408, -0.10905887931585312, 0.2897895872592926, 0.03839863836765289, -0.01812831684947014, -0.13575826585292816, -0.06568643450737, 0.05666029825806618, 0.1313513070344925, 0.08349579572677612, 0.050854943692684174, -0.11992635577917099, -0.019551722332835197, -0.13813380897045135, -0.0034622536040842533, 0.005683985538780689, 0.0461394265294075, -0.10639701783657074, 0.006168430205434561, 0.024127278476953506, 0.0022626498248428106, 0.038408249616622925, -0.0861884132027626, -0.05927899479866028, 0.0449383370578289, 0.191629558801651, -0.08662745356559753, -0.04366489127278328, 0.031331948935985565, 0.0635770633816719, 0.1665908545255661, -0.0791885182261467, -0.11718887090682983, -0.04995296150445938, -0.18878401815891266, 0.14341434836387634, -0.04168970510363579, -0.04350687190890312, -0.06163116171956062, -0.03025849722325802, -0.047250896692276, -0.044229716062545776, 0.1382674276828766, -0.019483657553792, -0.048538755625486374, -0.038088537752628326, 0.06614316254854202, -0.14700670540332794, 0.0641201063990593, 0.013901098631322384, -0.05545971542596817, 0.014613041654229164, -0.20335093140602112, -0.0825171172618866, 0.016300469636917114, -0.0199880488216877, 0.05639762431383133, -0.0390738882124424, -0.00696900999173522, -0.014785395935177803, -0.16005763411521912, 0.18701466917991638, 0.16356410086154938, -0.043810512870550156, 0.08727838844060898, 0.22564639151096344, -0.02450842224061489, -0.19792503118515015, -0.1265796720981598, -0.1275491863489151, -0.06351900100708008, -0.06801175326108932, -0.05242203548550606, 0.08282452821731567, 0.21915893256664276, -0.0832885131239891, 0.03665848821401596, -0.2557564973831177, -0.09214693307876587, 0.0910019651055336, 
-0.08453672379255295, 0.19268693029880524, -0.19372406601905823, -0.1218840554356575, -0.20282359421253204, -0.1455373466014862, 0.061828114092350006, -0.1823294311761856, 0.08012206852436066, -0.0370282344520092, -0.008339142426848412, -0.03996416553854942, 0.005994632374495268, 0.257800430059433, 0.07028637826442719, 0.05866943672299385, -0.07685048878192902, -0.10552538186311722, 0.20764243602752686, 0.0007392246043309569, 0.10850487649440765, -0.1734727919101715, -0.054796021431684494, -0.1422838270664215, -0.03346354141831398, -0.051429152488708496, 0.012880239635705948, 0.011791418306529522, -0.0747278481721878, -0.12907128036022186, 0.06102628633379936, -0.032934900373220444, 0.03715473785996437, 0.29228678345680237, 0.038612522184848785, -0.11875942349433899, 0.0032452773302793503, 0.05010717362165451, -0.12647850811481476, 0.07323813438415527, -0.09436493366956711, -0.008898098953068256, 0.0468573160469532, -0.2966322600841522, -0.042458560317754745, 0.061637941747903824, -0.06185073032975197, 0.10288338363170624, -0.006108872126787901, -0.0573735348880291, 0.04213752970099449, 0.1533532738685608, 0.10040313005447388, -0.11942335963249207, 0.05023583024740219, 0.0711626335978508, 0.0065950364805758, -0.07261904329061508, -0.08105376362800598, -0.020572038367390633, 0.0034725216683000326, 0.024547893553972244, 0.010723382234573364, -0.07733748108148575, 0.08172495663166046, 0.047404464334249496, 0.0756005197763443, -0.10890849679708481, 0.2325572520494461, 0.20583322644233704, -0.01514169480651617, -0.08176147192716599, 0.17891816794872284, -0.1222357526421547, -0.06386049091815948, 0.07133283466100693, 0.028595222160220146, 0.05843155458569527, -0.1201896071434021, -0.03296052664518356, -0.15223973989486694, -0.0772695243358612, -0.037856314331293106, 0.04061203822493553, 0.0010091625154018402, 0.03671037033200264, -0.08694390952587128, 0.11473726481199265, -0.012416194193065166, -0.030269475653767586, 0.04760380834341049, -0.10527770221233368, 
-0.17336082458496094, 0.11441836506128311, 0.07645488530397415, -0.013627625070512295, -0.03411359339952469, -0.10990513861179352, 0.023519661277532578, -0.14321616291999817, 0.14135108888149261, -0.12564392387866974, -0.012888015247881413, -0.03834209218621254, -0.029485052451491356, -0.04616335779428482, -0.1039838194847107, -0.10672588646411896, -0.03050416149199009, -0.07251136004924774, 0.13439209759235382, -0.0038879357744008303, -0.06547714024782181, 0.09856758266687393, 0.017479220405220985, 0.060978714376688004, 0.02820792980492115, -0.010649790987372398, 0.11200059205293655, -0.0911596342921257, -0.017508724704384804, 0.13819684088230133, 0.07529091835021973, 0.049319278448820114, 0.02712484821677208, -0.02621256746351719, 0.044090140610933304, 0.09565988928079605, 0.07962962239980698, -0.10916294157505035, -0.1604679375886917, -0.11994684487581253, -0.13095657527446747, -0.16392302513122559, 0.004496654961258173, -0.06586170941591263, 0.18117746710777283, -0.03872237354516983, 0.11193420737981796, 0.041638921946287155, 0.026169227436184883, -0.08820273727178574, 0.013304034247994423, -0.09264114499092102, -0.1457241326570511, -0.0022092843428254128, -0.07475346326828003, -0.0318228118121624, -0.029897509142756462, 0.290601909160614, 0.08101688325405121, -0.03663985803723335, 0.026480689644813538, 0.05505348742008209, -0.003962668590247631, 0.012575613334774971, 0.35179921984672546, 0.15750259160995483, -0.012086351402103901, -0.03858097270131111, -0.06981005519628525, 0.02239670231938362, -0.01422035787254572, 0.10713688284158707, 0.20385099947452545, 0.045665573328733444, 0.09199894964694977, 0.051136940717697144, -0.0017301609041169286, -0.03617826849222183, 0.05718815699219704, 0.08175966888666153, 0.05721388757228851, 0.024294331669807434, 0.06814810633659363, 0.17103363573551178, -0.12149249017238617, 0.05521564930677414, -0.05943729728460312, -0.09388432651758194, -0.10002854466438293, -0.19991672039031982, -0.062029000371694565, 
-0.11541001498699188, -0.028351537883281708, -0.04418467357754707, -0.022505223751068115, 0.17045357823371887, 0.031155474483966827, -0.04486522078514099, 0.06109369173645973, -0.16907331347465515, -0.03306376934051514, 0.043775562196969986, -0.029540354385972023, -0.024659257382154465, -0.08223645389080048, -0.03687271848320961, -0.009658833965659142, -0.03510158136487007, -0.02641434222459793, 0.041613008826971054, -0.063723623752594, -0.01931576058268547, -0.10874689370393753, -0.07217442244291306, -0.05319926142692566, -0.021090440452098846, 0.0458768755197525, 0.171823650598526, 0.06132342666387558, -0.017389921471476555, 0.06629563868045807, 0.14329472184181213, -0.00838939193636179, -0.2467908412218094, -0.02564666047692299, 0.05790325999259949, -0.01296290848404169, -0.030245792120695114, -0.018601981922984123, -0.10094865411520004, -0.05968039482831955, 0.2523779571056366, 0.19986440241336823, -0.0027798721566796303, -0.030820317566394806, -0.08956241607666016, 0.018129264935851097, -0.02315986342728138, 0.1393720507621765, 0.07709870487451553, 0.053138453513383865, 0.005100976210087538, 0.057174209505319595, -0.013538751751184464, -0.05222397670149803, -0.21587948501110077, 0.07999476045370102, 0.06699097156524658, -0.08093051612377167, -0.04095446690917015, 0.196256622672081, -0.09344784170389175, 0.12457575649023056, 0.05956362187862396, -0.06309816986322403, -0.12070171535015106, -0.013025282882153988, 0.03494537994265556, 0.07489099353551865, 0.011658195406198502, -0.032220508903265, -0.04011210799217224, -0.08209966123104095, 0.04889126494526863, -0.21445290744304657, -0.15913569927215576, 0.073812335729599, 0.05797286704182625, 0.19295139610767365, 0.017976978793740273, 0.06525919586420059, 0.056727319955825806, 0.021579498425126076, -0.08878548443317413, 0.21531768143177032, 0.04536481946706772, 0.16351155936717987, 0.020012876018881798, 0.006115299183875322, -0.06657800823450089, -0.1203504353761673, 0.09277291595935822, -0.03636255860328674, 
-0.012359881773591042, 0.1719062477350235, 0.015704216435551643, -0.128008171916008, 0.014395542442798615, -0.10231147706508636, 0.07426007837057114, 0.08322428911924362, -0.06613516807556152, -0.06032194197177887, -0.034896120429039, 0.04515561833977699, 0.010322289541363716, -0.17621372640132904, -0.14218249917030334, 0.06321631371974945, -0.06483768671751022, 0.06558042019605637, 0.10185535252094269, -0.11036550253629684, 0.06699710339307785, -0.08143414556980133, 0.04903606325387955, -0.06203988939523697, 0.03463492542505264, 0.02227565087378025, -0.03402342647314072, -0.010748478583991528, -0.33928102254867554, -0.0004425152437761426, 0.03687435761094093, -0.07402981072664261, -0.08882053196430206 ]
816e488c2580603415d57571af0f17d713c0e478
# Dataset Card for Evaluation run of aevalone/Pengland-Merge <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [aevalone/Pengland-Merge](https://huggingface.co/aevalone/Pengland-Merge) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_aevalone__Pengland-Merge", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T06:50:36.319568](https://huggingface.co/datasets/open-llm-leaderboard/details_aevalone__Pengland-Merge/blob/main/results_2024-01-25T06-50-36.319568.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.4951138346481346, "acc_stderr": 0.03407816807613722, "acc_norm": 0.5048451795300618, "acc_norm_stderr": 0.03500738607918216, "mc1": 0.2215422276621787, "mc1_stderr": 0.014537867601301145, "mc2": 0.47030169673148503, "mc2_stderr": 0.016478296846766455 }, "harness|arc:challenge|25": { "acc": 0.37627986348122866, "acc_stderr": 0.014157022555407172, "acc_norm": 0.4052901023890785, "acc_norm_stderr": 0.014346869060229325 }, "harness|hellaswag|10": { "acc": 0.32613025293766185, "acc_stderr": 0.004678375103797975, "acc_norm": 0.4706233817964549, "acc_norm_stderr": 0.004981161746388227 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.48148148148148145, "acc_stderr": 0.043163785995113245, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.043163785995113245 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6052631578947368, "acc_stderr": 0.039777499346220734, "acc_norm": 0.6052631578947368, "acc_norm_stderr": 0.039777499346220734 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6150943396226415, "acc_stderr": 0.02994649856769995, "acc_norm": 0.6150943396226415, "acc_norm_stderr": 0.02994649856769995 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5972222222222222, "acc_stderr": 0.04101405519842426, "acc_norm": 0.5972222222222222, "acc_norm_stderr": 0.04101405519842426 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, 
"acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5202312138728323, "acc_stderr": 0.03809342081273957, "acc_norm": 0.5202312138728323, "acc_norm_stderr": 0.03809342081273957 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3431372549019608, "acc_stderr": 0.047240073523838876, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.047240073523838876 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4765957446808511, "acc_stderr": 0.032650194750335815, "acc_norm": 0.4765957446808511, "acc_norm_stderr": 0.032650194750335815 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.41228070175438597, "acc_stderr": 0.04630653203366595, "acc_norm": 0.41228070175438597, "acc_norm_stderr": 0.04630653203366595 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5172413793103449, "acc_stderr": 0.04164188720169375, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.04164188720169375 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3915343915343915, "acc_stderr": 0.025138091388851112, "acc_norm": 0.3915343915343915, "acc_norm_stderr": 0.025138091388851112 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2619047619047619, "acc_stderr": 0.039325376803928704, "acc_norm": 0.2619047619047619, "acc_norm_stderr": 0.039325376803928704 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5064516129032258, "acc_stderr": 0.02844163823354051, "acc_norm": 0.5064516129032258, "acc_norm_stderr": 0.02844163823354051 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3793103448275862, "acc_stderr": 0.034139638059062345, "acc_norm": 0.3793103448275862, "acc_norm_stderr": 0.034139638059062345 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.3515151515151515, "acc_stderr": 0.0372820699868265, "acc_norm": 0.3515151515151515, "acc_norm_stderr": 0.0372820699868265 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6616161616161617, "acc_stderr": 0.03371124142626302, "acc_norm": 0.6616161616161617, "acc_norm_stderr": 0.03371124142626302 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7927461139896373, "acc_stderr": 0.02925282329180363, "acc_norm": 0.7927461139896373, "acc_norm_stderr": 0.02925282329180363 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4897435897435897, "acc_stderr": 0.025345672221942374, "acc_norm": 0.4897435897435897, "acc_norm_stderr": 0.025345672221942374 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.23333333333333334, "acc_stderr": 0.025787874220959316, "acc_norm": 0.23333333333333334, "acc_norm_stderr": 0.025787874220959316 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.48739495798319327, "acc_stderr": 0.03246816765752174, "acc_norm": 0.48739495798319327, "acc_norm_stderr": 0.03246816765752174 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 0.03780445850526733, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526733 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.728440366972477, "acc_stderr": 0.019069098363191428, "acc_norm": 0.728440366972477, "acc_norm_stderr": 0.019069098363191428 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.3101851851851852, "acc_stderr": 
0.03154696285656628, "acc_norm": 0.3101851851851852, "acc_norm_stderr": 0.03154696285656628 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.45588235294117646, "acc_stderr": 0.03495624522015474, "acc_norm": 0.45588235294117646, "acc_norm_stderr": 0.03495624522015474 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.5780590717299579, "acc_stderr": 0.032148146302403695, "acc_norm": 0.5780590717299579, "acc_norm_stderr": 0.032148146302403695 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5874439461883408, "acc_stderr": 0.03304062175449297, "acc_norm": 0.5874439461883408, "acc_norm_stderr": 0.03304062175449297 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.648854961832061, "acc_stderr": 0.04186445163013751, "acc_norm": 0.648854961832061, "acc_norm_stderr": 0.04186445163013751 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6942148760330579, "acc_stderr": 0.04205953933884123, "acc_norm": 0.6942148760330579, "acc_norm_stderr": 0.04205953933884123 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5740740740740741, "acc_stderr": 0.0478034362693679, "acc_norm": 0.5740740740740741, "acc_norm_stderr": 0.0478034362693679 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.588957055214724, "acc_stderr": 0.038656978537853624, "acc_norm": 0.588957055214724, "acc_norm_stderr": 0.038656978537853624 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4017857142857143, "acc_stderr": 0.04653333146973646, "acc_norm": 0.4017857142857143, "acc_norm_stderr": 0.04653333146973646 }, "harness|hendrycksTest-management|5": { "acc": 0.7087378640776699, "acc_stderr": 0.04498676320572924, "acc_norm": 0.7087378640776699, "acc_norm_stderr": 0.04498676320572924 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7863247863247863, "acc_stderr": 0.026853450377009137, "acc_norm": 0.7863247863247863, "acc_norm_stderr": 0.026853450377009137 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.5, "acc_stderr": 
0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7151979565772669, "acc_stderr": 0.016139174096522574, "acc_norm": 0.7151979565772669, "acc_norm_stderr": 0.016139174096522574 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5751445086705202, "acc_stderr": 0.026613350840261743, "acc_norm": 0.5751445086705202, "acc_norm_stderr": 0.026613350840261743 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.25251396648044694, "acc_stderr": 0.01453033020146866, "acc_norm": 0.25251396648044694, "acc_norm_stderr": 0.01453033020146866 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5751633986928104, "acc_stderr": 0.028304576673141107, "acc_norm": 0.5751633986928104, "acc_norm_stderr": 0.028304576673141107 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5819935691318328, "acc_stderr": 0.02801365189199507, "acc_norm": 0.5819935691318328, "acc_norm_stderr": 0.02801365189199507 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5648148148148148, "acc_stderr": 0.027586006221607718, "acc_norm": 0.5648148148148148, "acc_norm_stderr": 0.027586006221607718 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4219858156028369, "acc_stderr": 0.029462189233370604, "acc_norm": 0.4219858156028369, "acc_norm_stderr": 0.029462189233370604 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.34876140808344197, "acc_stderr": 0.01217203515712712, "acc_norm": 0.34876140808344197, "acc_norm_stderr": 0.01217203515712712 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4007352941176471, "acc_stderr": 0.029768263528933112, "acc_norm": 0.4007352941176471, "acc_norm_stderr": 0.029768263528933112 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5212418300653595, "acc_stderr": 0.02020957238860025, "acc_norm": 0.5212418300653595, "acc_norm_stderr": 0.02020957238860025 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5818181818181818, "acc_stderr": 
0.04724577405731572, "acc_norm": 0.5818181818181818, "acc_norm_stderr": 0.04724577405731572 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5551020408163265, "acc_stderr": 0.03181425118197786, "acc_norm": 0.5551020408163265, "acc_norm_stderr": 0.03181425118197786 }, "harness|hendrycksTest-sociology|5": { "acc": 0.4427860696517413, "acc_stderr": 0.03512310964123938, "acc_norm": 0.4427860696517413, "acc_norm_stderr": 0.03512310964123938 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.74, "acc_stderr": 0.044084400227680794, "acc_norm": 0.74, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-virology|5": { "acc": 0.42168674698795183, "acc_stderr": 0.03844453181770917, "acc_norm": 0.42168674698795183, "acc_norm_stderr": 0.03844453181770917 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7426900584795322, "acc_stderr": 0.03352799844161865, "acc_norm": 0.7426900584795322, "acc_norm_stderr": 0.03352799844161865 }, "harness|truthfulqa:mc|0": { "mc1": 0.2215422276621787, "mc1_stderr": 0.014537867601301145, "mc2": 0.47030169673148503, "mc2_stderr": 0.016478296846766455 }, "harness|winogrande|5": { "acc": 0.5895816890292028, "acc_stderr": 0.013825107120035863 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_aevalone__Pengland-Merge
[ "region:us" ]
2024-01-25T06:52:52+00:00
{"pretty_name": "Evaluation run of aevalone/Pengland-Merge", "dataset_summary": "Dataset automatically created during the evaluation run of model [aevalone/Pengland-Merge](https://huggingface.co/aevalone/Pengland-Merge) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_aevalone__Pengland-Merge\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T06:50:36.319568](https://huggingface.co/datasets/open-llm-leaderboard/details_aevalone__Pengland-Merge/blob/main/results_2024-01-25T06-50-36.319568.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.4951138346481346,\n \"acc_stderr\": 0.03407816807613722,\n \"acc_norm\": 0.5048451795300618,\n \"acc_norm_stderr\": 0.03500738607918216,\n \"mc1\": 0.2215422276621787,\n \"mc1_stderr\": 0.014537867601301145,\n \"mc2\": 0.47030169673148503,\n \"mc2_stderr\": 0.016478296846766455\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.37627986348122866,\n \"acc_stderr\": 0.014157022555407172,\n \"acc_norm\": 0.4052901023890785,\n \"acc_norm_stderr\": 0.014346869060229325\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.32613025293766185,\n \"acc_stderr\": 0.004678375103797975,\n \"acc_norm\": 0.4706233817964549,\n \"acc_norm_stderr\": 0.004981161746388227\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.04461960433384741,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.04461960433384741\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.043163785995113245,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.043163785995113245\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6052631578947368,\n \"acc_stderr\": 0.039777499346220734,\n \"acc_norm\": 0.6052631578947368,\n \"acc_norm_stderr\": 0.039777499346220734\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6150943396226415,\n \"acc_stderr\": 0.02994649856769995,\n \"acc_norm\": 0.6150943396226415,\n \"acc_norm_stderr\": 0.02994649856769995\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5972222222222222,\n \"acc_stderr\": 0.04101405519842426,\n \"acc_norm\": 0.5972222222222222,\n \"acc_norm_stderr\": 0.04101405519842426\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 
0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5202312138728323,\n \"acc_stderr\": 0.03809342081273957,\n \"acc_norm\": 0.5202312138728323,\n \"acc_norm_stderr\": 0.03809342081273957\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3431372549019608,\n \"acc_stderr\": 0.047240073523838876,\n \"acc_norm\": 0.3431372549019608,\n \"acc_norm_stderr\": 0.047240073523838876\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4765957446808511,\n \"acc_stderr\": 0.032650194750335815,\n \"acc_norm\": 0.4765957446808511,\n \"acc_norm_stderr\": 0.032650194750335815\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.41228070175438597,\n \"acc_stderr\": 0.04630653203366595,\n \"acc_norm\": 0.41228070175438597,\n \"acc_norm_stderr\": 0.04630653203366595\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.04164188720169375,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.04164188720169375\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3915343915343915,\n \"acc_stderr\": 0.025138091388851112,\n \"acc_norm\": 0.3915343915343915,\n \"acc_norm_stderr\": 0.025138091388851112\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2619047619047619,\n \"acc_stderr\": 
0.039325376803928704,\n \"acc_norm\": 0.2619047619047619,\n \"acc_norm_stderr\": 0.039325376803928704\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5064516129032258,\n \"acc_stderr\": 0.02844163823354051,\n \"acc_norm\": 0.5064516129032258,\n \"acc_norm_stderr\": 0.02844163823354051\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.3793103448275862,\n \"acc_stderr\": 0.034139638059062345,\n \"acc_norm\": 0.3793103448275862,\n \"acc_norm_stderr\": 0.034139638059062345\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.3515151515151515,\n \"acc_stderr\": 0.0372820699868265,\n \"acc_norm\": 0.3515151515151515,\n \"acc_norm_stderr\": 0.0372820699868265\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.6616161616161617,\n \"acc_stderr\": 0.03371124142626302,\n \"acc_norm\": 0.6616161616161617,\n \"acc_norm_stderr\": 0.03371124142626302\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7927461139896373,\n \"acc_stderr\": 0.02925282329180363,\n \"acc_norm\": 0.7927461139896373,\n \"acc_norm_stderr\": 0.02925282329180363\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.4897435897435897,\n \"acc_stderr\": 0.025345672221942374,\n \"acc_norm\": 0.4897435897435897,\n \"acc_norm_stderr\": 0.025345672221942374\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.23333333333333334,\n \"acc_stderr\": 0.025787874220959316,\n \"acc_norm\": 0.23333333333333334,\n \"acc_norm_stderr\": 0.025787874220959316\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.48739495798319327,\n \"acc_stderr\": 0.03246816765752174,\n \"acc_norm\": 0.48739495798319327,\n \"acc_norm_stderr\": 0.03246816765752174\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31125827814569534,\n \"acc_stderr\": 0.03780445850526733,\n \"acc_norm\": 0.31125827814569534,\n \"acc_norm_stderr\": 0.03780445850526733\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.728440366972477,\n \"acc_stderr\": 0.019069098363191428,\n \"acc_norm\": 0.728440366972477,\n \"acc_norm_stderr\": 0.019069098363191428\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.3101851851851852,\n \"acc_stderr\": 0.03154696285656628,\n \"acc_norm\": 0.3101851851851852,\n \"acc_norm_stderr\": 0.03154696285656628\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.45588235294117646,\n \"acc_stderr\": 0.03495624522015474,\n \"acc_norm\": 0.45588235294117646,\n \"acc_norm_stderr\": 0.03495624522015474\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.5780590717299579,\n \"acc_stderr\": 0.032148146302403695,\n \"acc_norm\": 0.5780590717299579,\n \"acc_norm_stderr\": 0.032148146302403695\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5874439461883408,\n \"acc_stderr\": 0.03304062175449297,\n \"acc_norm\": 0.5874439461883408,\n \"acc_norm_stderr\": 0.03304062175449297\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.648854961832061,\n \"acc_stderr\": 0.04186445163013751,\n \"acc_norm\": 0.648854961832061,\n \"acc_norm_stderr\": 0.04186445163013751\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6942148760330579,\n \"acc_stderr\": 0.04205953933884123,\n \"acc_norm\": 0.6942148760330579,\n \"acc_norm_stderr\": 0.04205953933884123\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5740740740740741,\n \"acc_stderr\": 0.0478034362693679,\n 
\"acc_norm\": 0.5740740740740741,\n \"acc_norm_stderr\": 0.0478034362693679\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.588957055214724,\n \"acc_stderr\": 0.038656978537853624,\n \"acc_norm\": 0.588957055214724,\n \"acc_norm_stderr\": 0.038656978537853624\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4017857142857143,\n \"acc_stderr\": 0.04653333146973646,\n \"acc_norm\": 0.4017857142857143,\n \"acc_norm_stderr\": 0.04653333146973646\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7087378640776699,\n \"acc_stderr\": 0.04498676320572924,\n \"acc_norm\": 0.7087378640776699,\n \"acc_norm_stderr\": 0.04498676320572924\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7863247863247863,\n \"acc_stderr\": 0.026853450377009137,\n \"acc_norm\": 0.7863247863247863,\n \"acc_norm_stderr\": 0.026853450377009137\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7151979565772669,\n \"acc_stderr\": 0.016139174096522574,\n \"acc_norm\": 0.7151979565772669,\n \"acc_norm_stderr\": 0.016139174096522574\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5751445086705202,\n \"acc_stderr\": 0.026613350840261743,\n \"acc_norm\": 0.5751445086705202,\n \"acc_norm_stderr\": 0.026613350840261743\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.25251396648044694,\n \"acc_stderr\": 0.01453033020146866,\n \"acc_norm\": 0.25251396648044694,\n \"acc_norm_stderr\": 0.01453033020146866\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5751633986928104,\n \"acc_stderr\": 0.028304576673141107,\n \"acc_norm\": 0.5751633986928104,\n \"acc_norm_stderr\": 0.028304576673141107\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5819935691318328,\n \"acc_stderr\": 0.02801365189199507,\n 
\"acc_norm\": 0.5819935691318328,\n \"acc_norm_stderr\": 0.02801365189199507\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5648148148148148,\n \"acc_stderr\": 0.027586006221607718,\n \"acc_norm\": 0.5648148148148148,\n \"acc_norm_stderr\": 0.027586006221607718\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4219858156028369,\n \"acc_stderr\": 0.029462189233370604,\n \"acc_norm\": 0.4219858156028369,\n \"acc_norm_stderr\": 0.029462189233370604\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.34876140808344197,\n \"acc_stderr\": 0.01217203515712712,\n \"acc_norm\": 0.34876140808344197,\n \"acc_norm_stderr\": 0.01217203515712712\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4007352941176471,\n \"acc_stderr\": 0.029768263528933112,\n \"acc_norm\": 0.4007352941176471,\n \"acc_norm_stderr\": 0.029768263528933112\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5212418300653595,\n \"acc_stderr\": 0.02020957238860025,\n \"acc_norm\": 0.5212418300653595,\n \"acc_norm_stderr\": 0.02020957238860025\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5818181818181818,\n \"acc_stderr\": 0.04724577405731572,\n \"acc_norm\": 0.5818181818181818,\n \"acc_norm_stderr\": 0.04724577405731572\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.5551020408163265,\n \"acc_stderr\": 0.03181425118197786,\n \"acc_norm\": 0.5551020408163265,\n \"acc_norm_stderr\": 0.03181425118197786\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.4427860696517413,\n \"acc_stderr\": 0.03512310964123938,\n \"acc_norm\": 0.4427860696517413,\n \"acc_norm_stderr\": 0.03512310964123938\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.044084400227680794\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.42168674698795183,\n \"acc_stderr\": 
0.03844453181770917,\n \"acc_norm\": 0.42168674698795183,\n \"acc_norm_stderr\": 0.03844453181770917\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7426900584795322,\n \"acc_stderr\": 0.03352799844161865,\n \"acc_norm\": 0.7426900584795322,\n \"acc_norm_stderr\": 0.03352799844161865\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2215422276621787,\n \"mc1_stderr\": 0.014537867601301145,\n \"mc2\": 0.47030169673148503,\n \"mc2_stderr\": 0.016478296846766455\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5895816890292028,\n \"acc_stderr\": 0.013825107120035863\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/aevalone/Pengland-Merge", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|arc:challenge|25_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|gsm8k|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hellaswag|10_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T06-50-36.319568.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T06-50-36.319568.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T06-50-36.319568.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T06-50-36.319568.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T06-50-36.319568.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T06-50-36.319568.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T06-50-36.319568.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T06-50-36.319568.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["**/details_harness|winogrande|5_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T06-50-36.319568.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T06_50_36.319568", "path": ["results_2024-01-25T06-50-36.319568.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T06-50-36.319568.parquet"]}]}]}
2024-01-25T06:53:14+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of aevalone/Pengland-Merge Dataset automatically created during the evaluation run of model aevalone/Pengland-Merge on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T06:50:36.319568 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks). You find each in the results and the "latest" split for each eval: ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of aevalone/Pengland-Merge\n\n\n\nDataset automatically created during the evaluation run of model aevalone/Pengland-Merge on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T06:50:36.319568(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of aevalone/Pengland-Merge\n\n\n\nDataset automatically created during the evaluation run of model aevalone/Pengland-Merge on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T06:50:36.319568(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 179, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of aevalone/Pengland-Merge\n\n\n\nDataset automatically created during the evaluation run of model aevalone/Pengland-Merge on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T06:50:36.319568(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.04703520983457565, 0.18072183430194855, -0.006757005583494902, 0.03969904035329819, 0.07254254072904587, -0.008256683126091957, 0.03374924138188362, 0.09923462569713593, 0.03394525870680809, 0.1760336011648178, -0.016575094312429428, 0.09503638744354248, 0.0877995640039444, 0.11572378873825073, 0.036337580531835556, -0.13692685961723328, 0.039416637271642685, -0.10281355679035187, 0.11196493357419968, 0.06626475602388382, 0.057973455637693405, -0.08412908017635345, 0.06429681926965714, -0.02665502019226551, 0.04799873009324074, -0.0007018399774096906, -0.06895558536052704, -0.04149875417351723, 0.11190567165613174, 0.08818185329437256, 0.04053414985537529, -0.025712095201015472, 0.03843134269118309, -0.2683030366897583, 0.018432892858982086, 0.09846187382936478, 0.002103891223669052, 0.042958345264196396, 0.14889131486415863, -0.08267904072999954, 0.0803876593708992, -0.021344734355807304, 0.07075861096382141, 0.04913641884922981, -0.11043677479028702, -0.15760143101215363, -0.1422824263572693, 0.018820105120539665, 0.06776762753725052, 0.047343894839286804, -0.023628393188118935, 0.11223282665014267, -0.039831310510635376, 0.039107806980609894, 0.13994765281677246, -0.1776096373796463, -0.023407669737935066, 0.0404013991355896, 0.023175183683633804, 0.0711434856057167, -0.08729986101388931, -0.019950496032834053, 0.02546122670173645, 0.06247643753886223, 0.004747224505990744, 0.008699094876646996, 0.036104604601860046, 0.01733863353729248, -0.13889800012111664, -0.12846073508262634, 0.11886444687843323, -0.006252662744373083, -0.04504263773560524, -0.16850492358207703, -0.03797850385308266, 0.013641498982906342, 0.005440329667180777, 0.0178546030074358, 0.0063828229904174805, 0.0012966967187821865, 0.08022558689117432, -0.014802036806941032, -0.0955410748720169, -0.026360666379332542, -0.03427937999367714, 0.04227237403392792, 0.03482583165168762, 0.0001720427826512605, 0.009463593363761902, 0.137971431016922, 0.029722735285758972, -0.05437014624476433, 
-0.07762054353952408, -0.05024207755923271, -0.13033267855644226, -0.031679559499025345, 0.026160096749663353, -0.08251018077135086, 0.05077918991446495, 0.24660588800907135, -0.027588704600930214, 0.02209060825407505, -0.107208251953125, 0.01643185317516327, 0.11744074523448944, 0.09141381829977036, -0.08563780039548874, -0.07456468790769577, -0.048485513776540756, 0.019332801923155785, 0.023432770743966103, -0.02149929292500019, 0.02738056890666485, 0.06812537461519241, 0.01114677544683218, 0.13175931572914124, 0.1154729351401329, 0.022351965308189392, -0.07524092495441437, -0.011465181596577168, 0.16661392152309418, -0.16474191844463348, 0.005007233005017042, 0.02569999359548092, -0.020534824579954147, -0.08473291993141174, 0.06805495172739029, -0.013127746991813183, -0.05849498137831688, 0.11846878379583359, -0.052940599620342255, -0.07120901346206665, -0.08870130777359009, -0.06211398169398308, 0.032840415835380554, -0.0016237657982856035, -0.06399817764759064, -0.07595260441303253, -0.11782700568437576, -0.08008504658937454, 0.026827853173017502, -0.06393760442733765, -0.001465317327529192, 0.007932026870548725, 0.019430357962846756, -0.009714744985103607, -0.0155025040730834, 0.11200874298810959, -0.07050464302301407, 0.029792044311761856, -0.04960241913795471, 0.030519749969244003, 0.11863596737384796, 0.02990218997001648, -0.1154867559671402, 0.09159279614686966, -0.10209663212299347, 0.099906325340271, -0.09942438453435898, -0.026476548984646797, -0.12232423573732376, 0.021814316511154175, -0.02680005505681038, 0.0269017294049263, -0.017030013725161552, 0.08230122923851013, -0.18102869391441345, -0.013054143637418747, 0.17798787355422974, -0.11628276854753494, -0.05657869949936867, 0.08896451443433762, -0.02618739753961563, 0.0536927729845047, 0.04052451252937317, 0.08383770287036896, 0.07762354612350464, -0.08945483714342117, -0.11117050796747208, -0.057859163731336594, -0.03555607423186302, 0.15896403789520264, 0.06252247095108032, -0.09937652945518494, 
0.09548848867416382, 0.03913530707359314, 0.0037698489613831043, -0.050371602177619934, -0.008339442312717438, -0.056046292185783386, -0.0057389638386666775, -0.04063335806131363, -0.056952543556690216, -0.026631513610482216, -0.0775832012295723, 0.0010063687805086374, -0.06379655003547668, -0.010110404342412949, 0.09679606556892395, -0.020776664838194847, 0.027084242552518845, -0.08156556636095047, 0.05573338270187378, -0.012358220294117928, 0.020893525332212448, -0.21016429364681244, -0.08556424826383591, 0.03738267719745636, -0.1869964301586151, 0.04996468126773834, -0.0027470116037875414, 0.022274350747466087, 0.06043742224574089, -0.0018453059019520879, 0.009308336302638054, 0.019048986956477165, -0.015714779496192932, -0.005869457498192787, -0.14608685672283173, -0.04529302939772606, -0.07877546548843384, 0.07034996896982193, -0.10510943830013275, -0.02110210992395878, 0.0530744306743145, 0.15561431646347046, 0.02110118605196476, -0.07514951378107071, 0.04695965349674225, 0.013599704951047897, -0.05119912698864937, -0.04863404855132103, -0.0039903284050524235, -0.017813831567764282, 0.03669925779104233, 0.07993583381175995, -0.17082460224628448, -0.13229380548000336, 0.06905724108219147, 0.14717204868793488, -0.07518821209669113, -0.065180204808712, -0.06774812936782837, -0.05981237441301346, -0.1013043150305748, -0.06265677511692047, 0.11456319689750671, 0.09441736340522766, 0.0515727698802948, -0.0659661740064621, -0.049059975892305374, -0.0062899840995669365, 0.04123577103018761, -0.07008503377437592, 0.1111045628786087, 0.09074383229017258, -0.06981407850980759, 0.09899589419364929, -0.018764540553092957, 0.10717515647411346, 0.10578467696905136, 0.009177078492939472, -0.11962432414293289, -0.008849142119288445, 0.06595797836780548, 0.05211230367422104, 0.06999695301055908, -0.012112628668546677, 0.03952919691801071, 0.08371350169181824, -0.004216073080897331, 0.03644499182701111, -0.06738847494125366, 0.03382530063390732, 0.02538786455988884, 
-0.0034231082536280155, 0.02860800176858902, 0.004454401787370443, 0.032748378813266754, 0.09125231206417084, 0.008711609058082104, 0.07905592024326324, -0.03764379397034645, -0.04553362727165222, -0.08440673351287842, 0.13512830436229706, -0.09048818796873093, -0.23221547901630402, -0.18086084723472595, -0.025610564276576042, -0.03710149973630905, -0.01438809558749199, 0.051625628024339676, 0.0126006118953228, -0.10260553658008575, -0.11558714509010315, 0.05729838088154793, 0.050956301391124725, -0.11812092363834381, -0.0273647028952837, 0.03282216191291809, -0.011489007622003555, -0.1615827977657318, 0.02726631984114647, 0.03916461020708084, -0.0687359943985939, 0.02103636972606182, 0.08745400607585907, 0.12201377004384995, 0.09538017958402634, 0.09157346934080124, -0.024149497970938683, -0.012517927214503288, 0.14154501259326935, -0.11495154350996017, 0.035106368362903595, 0.08828166872262955, -0.03811250254511833, 0.0762905403971672, 0.13921129703521729, 0.004754337947815657, -0.07445435225963593, 0.04382622614502907, 0.09845804423093796, -0.06322886049747467, -0.2580467462539673, -0.0880177840590477, -0.0296490415930748, 0.058035507798194885, 0.10101936757564545, 0.07800319045782089, -0.020777864381670952, -0.004315522965043783, -0.10524576902389526, -0.04565932974219322, -0.02252664417028427, 0.05802822858095169, 0.039367128163576126, -0.009708168916404247, 0.04693979024887085, -0.04906298592686653, 0.017690928652882576, 0.127778097987175, 0.03970450535416603, 0.17218123376369476, -0.03665218502283096, 0.17134417593479156, 0.0918252170085907, 0.07261041551828384, -0.030555009841918945, 0.06748928874731064, -0.013654578477144241, 0.07163892686367035, -0.013855372555553913, -0.09473570436239243, -0.020113974809646606, 0.09939146786928177, 0.05154140293598175, -0.0483725406229496, 0.057846084237098694, -0.06460700929164886, 0.06261712312698364, 0.2548690140247345, -0.0037480914033949375, -0.1260061264038086, -0.0370614267885685, 0.052848733961582184, 
-0.05191216245293617, -0.09462904185056686, 0.005270027089864016, 0.08913775533437729, -0.15594860911369324, 0.01937268301844597, -0.039779774844646454, 0.06796855479478836, -0.1379464864730835, -0.030612848699092865, -0.03462369740009308, 0.054465074092149734, -0.02275918424129486, 0.09895448386669159, -0.13557569682598114, 0.08665608614683151, -0.01638411171734333, 0.022026672959327698, -0.06227562576532364, 0.07348816841840744, -0.012990972958505154, -0.05615620315074921, 0.1549023538827896, -0.0048182569444179535, -0.10158351808786392, -0.06341038644313812, -0.1341681331396103, -0.011775310151278973, 0.03694138675928116, -0.12263953685760498, 0.10971980541944504, 0.01667567901313305, -0.02626737393438816, -0.04573296755552292, -0.012112148106098175, -0.08040088415145874, -0.22290708124637604, 0.0890195220708847, -0.12819232046604156, 0.057126522064208984, -0.05500500649213791, -0.04327647015452385, -0.059619780629873276, 0.13345608115196228, -0.12055429816246033, -0.06372638791799545, -0.10466621071100235, -0.04997720569372177, 0.16117341816425323, -0.06468582153320312, 0.0540190264582634, -0.04810566455125809, 0.1599666029214859, -0.05066295713186264, -0.049698155373334885, 0.009319378063082695, -0.07234002649784088, -0.17416702210903168, -0.04461676627397537, 0.11220786720514297, 0.06337064504623413, 0.010863780975341797, -0.011797170154750347, 0.04780402407050133, 0.017958393320441246, -0.09300167113542557, 0.050249483436346054, 0.13357222080230713, 0.11552418768405914, 0.05956948921084404, -0.03835483640432358, -0.11399807780981064, -0.09720014780759811, -0.0956302210688591, 0.06265313178300858, 0.19121156632900238, -0.06357108056545258, 0.14457185566425323, 0.14507900178432465, -0.11816757172346115, -0.20496153831481934, -0.10171997547149658, -0.012057035230100155, -0.012367025017738342, 0.11753921955823898, -0.20287026464939117, 0.0400075688958168, 0.09002066403627396, -0.03141799941658974, 0.11305295675992966, -0.28735804557800293, -0.14261609315872192, 
0.046888481825590134, 0.028203586116433144, -0.18623748421669006, -0.14789341390132904, -0.09278522431850433, -0.005177618004381657, -0.11770854145288467, 0.09522558748722076, 0.0011296386364847422, 0.045277830213308334, -0.019869036972522736, 0.059113509953022, 0.03979559615254402, -0.06967594474554062, 0.12504620850086212, -0.01704331301152706, 0.03210511431097984, -0.08654509484767914, -0.012398128397762775, -0.014523175545036793, -0.04375821724534035, 0.08638879656791687, 0.020572086796164513, 0.02947346679866314, -0.07480762153863907, -0.03455493971705437, -0.05481656268239021, 0.045752041041851044, -0.05574604123830795, -0.060013189911842346, -0.05383921414613724, 0.08089645206928253, 0.07046012580394745, -0.0116975586861372, 0.038378678262233734, -0.04397272691130638, 0.04198649898171425, 0.2217152714729309, 0.06500191241502762, 0.04867503046989441, -0.11944790184497833, -0.047364138066768646, -0.018509723246097565, 0.0035597262904047966, -0.08455333858728409, 0.05105520412325859, 0.0887286588549614, 0.032029006630182266, 0.10656600445508957, -0.018473904579877853, -0.201666921377182, 0.010211662389338017, 0.07772743701934814, -0.10922703146934509, -0.21643342077732086, 0.04334133863449097, 0.06481140106916428, -0.11088467389345169, -0.07919329404830933, 0.09629706293344498, 0.023270588368177414, -0.024295685812830925, 0.0038842714857310057, 0.07840676605701447, 0.042244940996170044, 0.0865042507648468, -0.022576292976737022, 0.036168210208415985, -0.06416498124599457, 0.12296690046787262, 0.14916187524795532, -0.1045723631978035, 0.002004310255870223, 0.05917419120669365, -0.04438403248786926, -0.061029646545648575, -0.0381166897714138, 0.04292932525277138, 0.008706668391823769, -0.0331021286547184, -0.0015553322155028582, -0.053200121968984604, 0.07505764812231064, 0.12109830975532532, -0.015224051661789417, 0.06263147294521332, 0.02119365520775318, -0.009614762850105762, -0.04098247364163399, 0.11315536499023438, 0.03913547843694687, 0.04168005660176277, 
-0.02574859745800495, 0.02738446742296219, 0.019131770357489586, -0.02396659180521965, 0.01886633038520813, -0.0662095844745636, -0.059130728244781494, 0.013717996887862682, -0.18497276306152344, 0.0357588492333889, -0.08276813477277756, -0.012280681170523167, 0.004353515338152647, 0.005826217588037252, 0.004524662625044584, 0.011599956080317497, -0.059125591069459915, -0.05275005102157593, -0.04924740269780159, 0.13114723563194275, -0.2080000340938568, -0.009611573070287704, 0.08910482376813889, -0.0817737877368927, 0.0753304585814476, 0.002068166621029377, -0.00963453110307455, 0.00837817694991827, -0.08193889260292053, -0.0076236301101744175, -0.018765361979603767, 0.05390235781669617, 0.011239524930715561, -0.134069561958313, -0.014174730516970158, -0.00837021879851818, -0.08823280036449432, -0.000054037285735830665, 0.015099949203431606, -0.1463666707277298, 0.08556094020605087, 0.10070646554231644, -0.04299324378371239, -0.03131844848394394, 0.024995051324367523, 0.0357535220682621, 0.01527330745011568, 0.0918019637465477, -0.03115266002714634, 0.03677060082554817, -0.1530947983264923, -0.035921335220336914, 0.005368715617805719, 0.006465033628046513, 0.05703871697187424, 0.000871251046191901, 0.02113756723701954, -0.007147917989641428, 0.2178610861301422, -0.019632479175925255, 0.016514159739017487, 0.024367747828364372, -0.01529211737215519, -0.046023890376091, 0.03613641485571861, -0.039094697684049606, 0.003451706375926733, 0.01805523782968521, 0.0035894240718334913, -0.029358677566051483, -0.05092523992061615, -0.002087798435240984, 0.08132059872150421, 0.11682999134063721, 0.2100863754749298, -0.03289105370640755, 0.05675295367836952, -0.150029718875885, -0.08185502886772156, -0.020538154989480972, -0.08545106649398804, 0.05742298439145088, -0.0633266493678093, 0.05671551078557968, 0.10337696969509125, -0.12516531348228455, 0.14513523876667023, -0.040872152894735336, -0.02033945918083191, -0.04285936802625656, -0.1846892386674881, -0.03383788466453552, 
0.03363228961825371, 0.007702795322984457, -0.07835505902767181, 0.10992779582738876, 0.12631912529468536, 0.009330418892204762, -0.0002681747719179839, 0.07585544139146805, -0.08056429773569107, -0.05849986523389816, -0.02557414211332798, 0.020115945488214493, 0.02481907047331333, 0.012183202430605888, 0.0614602230489254, 0.0022881007753312588, 0.048027243465185165, 0.06672845035791397, 0.10227504372596741, 0.04888411983847618, 0.0356888584792614, -0.03348097205162048, -0.0386483259499073, -0.0015021238941699266, -0.025304075330495834, -0.0735229030251503, 0.1683369278907776, 0.07274819910526276, 0.01996670477092266, 0.021238015964627266, 0.19771741330623627, -0.015341493301093578, -0.06602293998003006, -0.1390923708677292, 0.1574821174144745, -0.005585875362157822, 0.037437088787555695, 0.02448764629662037, -0.11352214962244034, 0.00002983058766403701, 0.15235216915607452, 0.10176482051610947, 0.019156722351908684, 0.01173559669405222, 0.03871357813477516, 0.022723998874425888, -0.02477441169321537, 0.04606512561440468, 0.03847997263073921, 0.22334334254264832, -0.054686836898326874, 0.09817975759506226, -0.029131757095456123, -0.003126539522781968, -0.04110383987426758, 0.12754659354686737, -0.06848734617233276, 0.02057354338467121, -0.06731949001550674, 0.06667237728834152, -0.06666271388530731, -0.2548816502094269, -0.007184287998825312, -0.061651937663555145, -0.1330345720052719, -0.005573473405092955, 0.032459158450365067, -0.025802677497267723, 0.04180464521050453, 0.041835542768239975, -0.026737580075860023, 0.1910303831100464, 0.013687573373317719, -0.05829203501343727, -0.07655566930770874, 0.05565262585878372, -0.06511496007442474, 0.28248393535614014, 0.006560825277119875, 0.018768589943647385, 0.07681763917207718, -0.017208268865942955, -0.13036692142486572, 0.03778188303112984, 0.08892553299665451, -0.048422254621982574, 0.03746909275650978, 0.13143862783908844, -0.015420753508806229, 0.14215464890003204, 0.032378312200307846, 0.023610811680555344, 
0.0743611678481102, 0.042711954563856125, 0.025675863027572632, -0.07471219450235367, 0.060444992035627365, -0.07943661510944366, 0.11469767987728119, 0.11388961225748062, -0.006429334636777639, 0.011287804692983627, -0.05157356336712837, 0.04960038140416145, -0.050322357565164566, 0.11638079583644867, -0.01955355517566204, -0.1269492208957672, 0.050513435155153275, 0.015232624486088753, 0.08266083896160126, -0.21836145222187042, -0.07281553000211716, 0.09357912838459015, -0.055708739906549454, -0.01012535858899355, 0.09108053147792816, 0.045759525150060654, 0.02359735779464245, -0.04468705505132675, -0.12249775230884552, 0.031401168555021286, 0.09948824346065521, -0.06110542640089989, -0.033015962690114975 ]
4d59186a555aae98a7fc04a2fbf0d62311493215
# Dataset Card for Evaluation run of Weyaxi/Draco-8x7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Weyaxi/Draco-8x7B](https://huggingface.co/Weyaxi/Draco-8x7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Weyaxi__Draco-8x7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T06:52:14.231942](https://huggingface.co/datasets/open-llm-leaderboard/details_Weyaxi__Draco-8x7B/blob/main/results_2024-01-25T06-52-14.231942.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6523117174425681, "acc_stderr": 0.031872005741893215, "acc_norm": 0.6530656365171571, "acc_norm_stderr": 0.03252074448359809, "mc1": 0.4528763769889841, "mc1_stderr": 0.01742558984831402, "mc2": 0.6265250771231468, "mc2_stderr": 0.015246664453677405 }, "harness|arc:challenge|25": { "acc": 0.6228668941979523, "acc_stderr": 0.014163366896192596, "acc_norm": 0.6501706484641638, "acc_norm_stderr": 0.013936809212158294 }, "harness|hellaswag|10": { "acc": 0.6653057159928301, "acc_stderr": 0.004709190850274406, "acc_norm": 0.8524198366859191, "acc_norm_stderr": 0.003539584491392116 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6518518518518519, "acc_stderr": 0.041153246103369526, "acc_norm": 0.6518518518518519, "acc_norm_stderr": 0.041153246103369526 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6973684210526315, "acc_stderr": 0.037385206761196686, "acc_norm": 0.6973684210526315, "acc_norm_stderr": 0.037385206761196686 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.720754716981132, "acc_stderr": 0.027611163402399715, "acc_norm": 0.720754716981132, "acc_norm_stderr": 0.027611163402399715 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7361111111111112, "acc_stderr": 0.03685651095897532, "acc_norm": 0.7361111111111112, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, 
"acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6878612716763006, "acc_stderr": 0.03533133389323657, "acc_norm": 0.6878612716763006, "acc_norm_stderr": 0.03533133389323657 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4019607843137255, "acc_stderr": 0.048786087144669955, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.048786087144669955 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.78, "acc_stderr": 0.04163331998932263, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932263 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5829787234042553, "acc_stderr": 0.03223276266711712, "acc_norm": 0.5829787234042553, "acc_norm_stderr": 0.03223276266711712 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.47368421052631576, "acc_stderr": 0.046970851366478626, "acc_norm": 0.47368421052631576, "acc_norm_stderr": 0.046970851366478626 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5862068965517241, "acc_stderr": 0.04104269211806232, "acc_norm": 0.5862068965517241, "acc_norm_stderr": 0.04104269211806232 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3941798941798942, "acc_stderr": 0.02516798233389414, "acc_norm": 0.3941798941798942, "acc_norm_stderr": 0.02516798233389414 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677172, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7838709677419354, "acc_stderr": 0.02341529343356852, "acc_norm": 0.7838709677419354, "acc_norm_stderr": 0.02341529343356852 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4827586206896552, "acc_stderr": 0.035158955511657, "acc_norm": 0.4827586206896552, "acc_norm_stderr": 0.035158955511657 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7878787878787878, "acc_stderr": 0.03192271569548301, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.03192271569548301 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7828282828282829, "acc_stderr": 0.029376616484945633, "acc_norm": 0.7828282828282829, "acc_norm_stderr": 0.029376616484945633 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8808290155440415, "acc_stderr": 0.023381935348121437, "acc_norm": 0.8808290155440415, "acc_norm_stderr": 0.023381935348121437 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6692307692307692, "acc_stderr": 0.02385479568097112, "acc_norm": 0.6692307692307692, "acc_norm_stderr": 0.02385479568097112 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3592592592592593, "acc_stderr": 0.029252905927251972, "acc_norm": 0.3592592592592593, "acc_norm_stderr": 0.029252905927251972 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6848739495798319, "acc_stderr": 0.030176808288974337, "acc_norm": 0.6848739495798319, "acc_norm_stderr": 0.030176808288974337 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3576158940397351, "acc_stderr": 0.03913453431177258, "acc_norm": 0.3576158940397351, "acc_norm_stderr": 0.03913453431177258 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8477064220183487, "acc_stderr": 0.015405084393157074, "acc_norm": 0.8477064220183487, "acc_norm_stderr": 0.015405084393157074 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5324074074074074, "acc_stderr": 
0.03402801581358966, "acc_norm": 0.5324074074074074, "acc_norm_stderr": 0.03402801581358966 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8529411764705882, "acc_stderr": 0.024857478080250458, "acc_norm": 0.8529411764705882, "acc_norm_stderr": 0.024857478080250458 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.810126582278481, "acc_stderr": 0.025530100460233504, "acc_norm": 0.810126582278481, "acc_norm_stderr": 0.025530100460233504 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7709923664122137, "acc_stderr": 0.036853466317118506, "acc_norm": 0.7709923664122137, "acc_norm_stderr": 0.036853466317118506 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8347107438016529, "acc_stderr": 0.03390780612972776, "acc_norm": 0.8347107438016529, "acc_norm_stderr": 0.03390780612972776 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.040191074725573483, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.040191074725573483 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8098159509202454, "acc_stderr": 0.03083349114628123, "acc_norm": 0.8098159509202454, "acc_norm_stderr": 0.03083349114628123 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.48214285714285715, "acc_stderr": 0.047427623612430116, "acc_norm": 0.48214285714285715, "acc_norm_stderr": 0.047427623612430116 }, "harness|hendrycksTest-management|5": { "acc": 0.8349514563106796, "acc_stderr": 0.036756688322331886, "acc_norm": 0.8349514563106796, "acc_norm_stderr": 0.036756688322331886 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507332, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507332 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.74, "acc_stderr": 
0.0440844002276808, "acc_norm": 0.74, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8250319284802043, "acc_stderr": 0.01358661921990334, "acc_norm": 0.8250319284802043, "acc_norm_stderr": 0.01358661921990334 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7341040462427746, "acc_stderr": 0.02378620325550829, "acc_norm": 0.7341040462427746, "acc_norm_stderr": 0.02378620325550829 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2860335195530726, "acc_stderr": 0.015113972129062136, "acc_norm": 0.2860335195530726, "acc_norm_stderr": 0.015113972129062136 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7320261437908496, "acc_stderr": 0.025360603796242557, "acc_norm": 0.7320261437908496, "acc_norm_stderr": 0.025360603796242557 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7041800643086816, "acc_stderr": 0.02592237178881877, "acc_norm": 0.7041800643086816, "acc_norm_stderr": 0.02592237178881877 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7530864197530864, "acc_stderr": 0.0239935017090421, "acc_norm": 0.7530864197530864, "acc_norm_stderr": 0.0239935017090421 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5141843971631206, "acc_stderr": 0.02981549448368206, "acc_norm": 0.5141843971631206, "acc_norm_stderr": 0.02981549448368206 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4641460234680574, "acc_stderr": 0.01273736131873058, "acc_norm": 0.4641460234680574, "acc_norm_stderr": 0.01273736131873058 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6985294117647058, "acc_stderr": 0.027875982114273168, "acc_norm": 0.6985294117647058, "acc_norm_stderr": 0.027875982114273168 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6699346405228758, "acc_stderr": 0.019023726160724553, "acc_norm": 0.6699346405228758, "acc_norm_stderr": 0.019023726160724553 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6363636363636364, "acc_stderr": 
0.046075820907199756, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.046075820907199756 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7387755102040816, "acc_stderr": 0.028123429335142773, "acc_norm": 0.7387755102040816, "acc_norm_stderr": 0.028123429335142773 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.02587064676616913, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.02587064676616913 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.88, "acc_stderr": 0.03265986323710906, "acc_norm": 0.88, "acc_norm_stderr": 0.03265986323710906 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8421052631578947, "acc_stderr": 0.027966785859160882, "acc_norm": 0.8421052631578947, "acc_norm_stderr": 0.027966785859160882 }, "harness|truthfulqa:mc|0": { "mc1": 0.4528763769889841, "mc1_stderr": 0.01742558984831402, "mc2": 0.6265250771231468, "mc2_stderr": 0.015246664453677405 }, "harness|winogrande|5": { "acc": 0.8066298342541437, "acc_stderr": 0.011099796645920526 }, "harness|gsm8k|5": { "acc": 0.6679302501895376, "acc_stderr": 0.012972465034361861 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_PulsarAI__Draco-8x7B
[ "region:us" ]
2024-01-25T06:54:32+00:00
{"pretty_name": "Evaluation run of Weyaxi/Draco-8x7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [Weyaxi/Draco-8x7B](https://huggingface.co/Weyaxi/Draco-8x7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Weyaxi__Draco-8x7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T06:52:14.231942](https://huggingface.co/datasets/open-llm-leaderboard/details_Weyaxi__Draco-8x7B/blob/main/results_2024-01-25T06-52-14.231942.json)(note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6523117174425681,\n \"acc_stderr\": 0.031872005741893215,\n \"acc_norm\": 0.6530656365171571,\n \"acc_norm_stderr\": 0.03252074448359809,\n \"mc1\": 0.4528763769889841,\n \"mc1_stderr\": 0.01742558984831402,\n \"mc2\": 0.6265250771231468,\n \"mc2_stderr\": 0.015246664453677405\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6228668941979523,\n \"acc_stderr\": 0.014163366896192596,\n \"acc_norm\": 0.6501706484641638,\n \"acc_norm_stderr\": 0.013936809212158294\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6653057159928301,\n \"acc_stderr\": 0.004709190850274406,\n \"acc_norm\": 0.8524198366859191,\n \"acc_norm_stderr\": 0.003539584491392116\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252606,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252606\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6518518518518519,\n \"acc_stderr\": 0.041153246103369526,\n \"acc_norm\": 0.6518518518518519,\n \"acc_norm_stderr\": 0.041153246103369526\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.037385206761196686,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.037385206761196686\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.720754716981132,\n \"acc_stderr\": 0.027611163402399715,\n \"acc_norm\": 0.720754716981132,\n \"acc_norm_stderr\": 0.027611163402399715\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7361111111111112,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.7361111111111112,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.44,\n 
\"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6878612716763006,\n \"acc_stderr\": 0.03533133389323657,\n \"acc_norm\": 0.6878612716763006,\n \"acc_norm_stderr\": 0.03533133389323657\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4019607843137255,\n \"acc_stderr\": 0.048786087144669955,\n \"acc_norm\": 0.4019607843137255,\n \"acc_norm_stderr\": 0.048786087144669955\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932263,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932263\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5829787234042553,\n \"acc_stderr\": 0.03223276266711712,\n \"acc_norm\": 0.5829787234042553,\n \"acc_norm_stderr\": 0.03223276266711712\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.47368421052631576,\n \"acc_stderr\": 0.046970851366478626,\n \"acc_norm\": 0.47368421052631576,\n \"acc_norm_stderr\": 0.046970851366478626\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5862068965517241,\n \"acc_stderr\": 0.04104269211806232,\n \"acc_norm\": 0.5862068965517241,\n \"acc_norm_stderr\": 0.04104269211806232\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3941798941798942,\n \"acc_stderr\": 0.02516798233389414,\n \"acc_norm\": 0.3941798941798942,\n \"acc_norm_stderr\": 0.02516798233389414\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 
0.04463112720677172,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7838709677419354,\n \"acc_stderr\": 0.02341529343356852,\n \"acc_norm\": 0.7838709677419354,\n \"acc_norm_stderr\": 0.02341529343356852\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4827586206896552,\n \"acc_stderr\": 0.035158955511657,\n \"acc_norm\": 0.4827586206896552,\n \"acc_norm_stderr\": 0.035158955511657\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.03192271569548301,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.03192271569548301\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.029376616484945633,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.029376616484945633\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8808290155440415,\n \"acc_stderr\": 0.023381935348121437,\n \"acc_norm\": 0.8808290155440415,\n \"acc_norm_stderr\": 0.023381935348121437\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6692307692307692,\n \"acc_stderr\": 0.02385479568097112,\n \"acc_norm\": 0.6692307692307692,\n \"acc_norm_stderr\": 0.02385479568097112\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3592592592592593,\n \"acc_stderr\": 0.029252905927251972,\n \"acc_norm\": 0.3592592592592593,\n \"acc_norm_stderr\": 0.029252905927251972\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6848739495798319,\n \"acc_stderr\": 0.030176808288974337,\n \"acc_norm\": 0.6848739495798319,\n \"acc_norm_stderr\": 0.030176808288974337\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8477064220183487,\n \"acc_stderr\": 0.015405084393157074,\n \"acc_norm\": 0.8477064220183487,\n \"acc_norm_stderr\": 0.015405084393157074\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5324074074074074,\n \"acc_stderr\": 0.03402801581358966,\n \"acc_norm\": 0.5324074074074074,\n \"acc_norm_stderr\": 0.03402801581358966\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8529411764705882,\n \"acc_stderr\": 0.024857478080250458,\n \"acc_norm\": 0.8529411764705882,\n \"acc_norm_stderr\": 0.024857478080250458\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.810126582278481,\n \"acc_stderr\": 0.025530100460233504,\n \"acc_norm\": 0.810126582278481,\n \"acc_norm_stderr\": 0.025530100460233504\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7709923664122137,\n \"acc_stderr\": 0.036853466317118506,\n \"acc_norm\": 0.7709923664122137,\n \"acc_norm_stderr\": 0.036853466317118506\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8347107438016529,\n \"acc_stderr\": 0.03390780612972776,\n \"acc_norm\": 0.8347107438016529,\n \"acc_norm_stderr\": 0.03390780612972776\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.040191074725573483,\n 
\"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.040191074725573483\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8098159509202454,\n \"acc_stderr\": 0.03083349114628123,\n \"acc_norm\": 0.8098159509202454,\n \"acc_norm_stderr\": 0.03083349114628123\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.48214285714285715,\n \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.48214285714285715,\n \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8349514563106796,\n \"acc_stderr\": 0.036756688322331886,\n \"acc_norm\": 0.8349514563106796,\n \"acc_norm_stderr\": 0.036756688322331886\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.02190190511507332,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.02190190511507332\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8250319284802043,\n \"acc_stderr\": 0.01358661921990334,\n \"acc_norm\": 0.8250319284802043,\n \"acc_norm_stderr\": 0.01358661921990334\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7341040462427746,\n \"acc_stderr\": 0.02378620325550829,\n \"acc_norm\": 0.7341040462427746,\n \"acc_norm_stderr\": 0.02378620325550829\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2860335195530726,\n \"acc_stderr\": 0.015113972129062136,\n \"acc_norm\": 0.2860335195530726,\n \"acc_norm_stderr\": 0.015113972129062136\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7320261437908496,\n \"acc_stderr\": 0.025360603796242557,\n \"acc_norm\": 0.7320261437908496,\n \"acc_norm_stderr\": 0.025360603796242557\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7041800643086816,\n \"acc_stderr\": 0.02592237178881877,\n 
\"acc_norm\": 0.7041800643086816,\n \"acc_norm_stderr\": 0.02592237178881877\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7530864197530864,\n \"acc_stderr\": 0.0239935017090421,\n \"acc_norm\": 0.7530864197530864,\n \"acc_norm_stderr\": 0.0239935017090421\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5141843971631206,\n \"acc_stderr\": 0.02981549448368206,\n \"acc_norm\": 0.5141843971631206,\n \"acc_norm_stderr\": 0.02981549448368206\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4641460234680574,\n \"acc_stderr\": 0.01273736131873058,\n \"acc_norm\": 0.4641460234680574,\n \"acc_norm_stderr\": 0.01273736131873058\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6985294117647058,\n \"acc_stderr\": 0.027875982114273168,\n \"acc_norm\": 0.6985294117647058,\n \"acc_norm_stderr\": 0.027875982114273168\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6699346405228758,\n \"acc_stderr\": 0.019023726160724553,\n \"acc_norm\": 0.6699346405228758,\n \"acc_norm_stderr\": 0.019023726160724553\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6363636363636364,\n \"acc_stderr\": 0.046075820907199756,\n \"acc_norm\": 0.6363636363636364,\n \"acc_norm_stderr\": 0.046075820907199756\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.028123429335142773,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.028123429335142773\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.02587064676616913,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.02587064676616913\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.88,\n \"acc_stderr\": 0.03265986323710906,\n \"acc_norm\": 0.88,\n \"acc_norm_stderr\": 0.03265986323710906\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 
0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8421052631578947,\n \"acc_stderr\": 0.027966785859160882,\n \"acc_norm\": 0.8421052631578947,\n \"acc_norm_stderr\": 0.027966785859160882\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4528763769889841,\n \"mc1_stderr\": 0.01742558984831402,\n \"mc2\": 0.6265250771231468,\n \"mc2_stderr\": 0.015246664453677405\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8066298342541437,\n \"acc_stderr\": 0.011099796645920526\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6679302501895376,\n \"acc_stderr\": 0.012972465034361861\n }\n}\n```", "repo_url": "https://huggingface.co/Weyaxi/Draco-8x7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|arc:challenge|25_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|gsm8k|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hellaswag|10_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T06-52-14.231942.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T06-52-14.231942.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T06-52-14.231942.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T06-52-14.231942.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T06-52-14.231942.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T06-52-14.231942.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T06-52-14.231942.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": 
"2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": 
"2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T06-52-14.231942.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["**/details_harness|winogrande|5_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T06-52-14.231942.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T06_52_14.231942", "path": ["results_2024-01-25T06-52-14.231942.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T06-52-14.231942.parquet"]}]}]}
2024-01-25T09:59:25+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Weyaxi/Draco-8x7B Dataset automatically created during the evaluation run of model Weyaxi/Draco-8x7B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T06:52:14.231942 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Weyaxi/Draco-8x7B\n\n\n\nDataset automatically created during the evaluation run of model Weyaxi/Draco-8x7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T06:52:14.231942(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Weyaxi/Draco-8x7B\n\n\n\nDataset automatically created during the evaluation run of model Weyaxi/Draco-8x7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T06:52:14.231942(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 183, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Weyaxi/Draco-8x7B\n\n\n\nDataset automatically created during the evaluation run of model Weyaxi/Draco-8x7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T06:52:14.231942(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.03686206787824631, 0.21092961728572845, -0.005788113921880722, 0.04060595482587814, 0.07592004537582397, -0.015469972975552082, 0.04905271157622337, 0.11460424959659576, 0.022106291726231575, 0.18284568190574646, -0.021722793579101562, 0.09461306780576706, 0.08265043050050735, 0.09128307551145554, 0.023473618552088737, -0.13933445513248444, 0.026998719200491905, -0.09428323060274124, 0.11152368783950806, 0.06747978180646896, 0.05991804227232933, -0.07857168465852737, 0.06785891205072403, -0.027125902473926544, 0.04437876492738724, -0.013765729032456875, -0.0737103670835495, -0.01960180699825287, 0.10293661803007126, 0.10449373722076416, 0.04284103587269783, -0.020429152995347977, 0.02355692908167839, -0.27262887358665466, 0.016328638419508934, 0.0989384576678276, -0.008258304558694363, 0.03960591182112694, 0.14890411496162415, -0.09101788699626923, 0.0861162543296814, -0.03186183422803879, 0.06701157242059708, 0.050516411662101746, -0.11676257103681564, -0.15182140469551086, -0.16273155808448792, 0.00806242786347866, 0.061012573540210724, 0.0366923026740551, -0.025148794054985046, 0.1454956829547882, -0.06007261946797371, 0.04626407101750374, 0.13934768736362457, -0.1270657330751419, -0.01837635599076748, 0.0455118827521801, 0.023149635642766953, 0.08756019920110703, -0.08084463328123093, -0.02307025156915188, 0.03476545587182045, 0.05234711244702339, -0.007907127030193806, 0.018055114895105362, 0.003949716687202454, 0.015487683936953545, -0.1504325121641159, -0.1258944869041443, 0.1267116814851761, 0.0008290067780762911, -0.04914303123950958, -0.17446835339069366, -0.014545763842761517, 0.018977170810103416, 0.004286494571715593, 0.01396409422159195, -0.0033798273652791977, -0.013228868134319782, 0.09976811707019806, -0.010758237913250923, -0.09952449053525925, -0.026709727942943573, -0.00470691965892911, 0.062296874821186066, 0.030192961916327477, 0.003149580443277955, 0.0021053149830549955, 0.11866078525781631, 0.026667306199669838, -0.04930809885263443, 
-0.0680968165397644, -0.06169478967785835, -0.09618541598320007, -0.03745204582810402, 0.00892252754420042, -0.06640295684337616, 0.04635229706764221, 0.21855151653289795, -0.020161455497145653, 0.01973099820315838, -0.11264603585004807, 0.020036254078149796, 0.12727054953575134, 0.05970575287938118, -0.0839572548866272, -0.05834047496318817, -0.041749656200408936, 0.02923460863530636, 0.02942974679172039, -0.009217049926519394, 0.007680442649871111, 0.07503464818000793, 0.01010243222117424, 0.12379223108291626, 0.12104356288909912, 0.025115910917520523, -0.07859805226325989, -0.02304326929152012, 0.21296189725399017, -0.14172492921352386, -0.006080896593630314, 0.027643639594316483, -0.038950856775045395, -0.08971823006868362, 0.05584271252155304, -0.008165826089680195, -0.062082137912511826, 0.11859636753797531, -0.03987029939889908, -0.07350202649831772, -0.08973734825849533, -0.08353938907384872, 0.04495625197887421, -0.004251020960509777, -0.055904701352119446, -0.06266484409570694, -0.09977063536643982, -0.08886860311031342, 0.0365331694483757, -0.07552709430456161, -0.009096827358007431, 0.014192474074661732, 0.008056733757257462, -0.01048162393271923, -0.01151042990386486, 0.0999600738286972, -0.0641302689909935, 0.03473595157265663, -0.02844877913594246, 0.03325648978352547, 0.0939922109246254, 0.03565753996372223, -0.10905983299016953, 0.08085070550441742, -0.10622189939022064, 0.0952814593911171, -0.1180560290813446, -0.025255529209971428, -0.11867737025022507, 0.006381746381521225, -0.026723001152276993, 0.04767635837197304, -0.028187651187181473, 0.0897388756275177, -0.20418962836265564, -0.0022614654153585434, 0.18959228694438934, -0.12299279123544693, -0.06937215477228165, 0.09828026592731476, -0.04269053041934967, 0.0516718290746212, 0.04310227558016777, 0.09332722425460815, 0.10916879773139954, -0.08574655652046204, -0.09335942566394806, -0.04709804430603981, -0.034393567591905594, 0.1493208110332489, 0.0709625855088234, -0.10241375118494034, 
0.10424862056970596, 0.03185390681028366, 0.001684641931205988, -0.06549686193466187, -0.008175211027264595, -0.06098490208387375, -0.009510092437267303, -0.0601181760430336, -0.06339427828788757, -0.011245251633226871, -0.07531432062387466, -0.02144037000834942, -0.06714244931936264, -0.005562301259487867, 0.10152871906757355, -0.027401963248848915, 0.016888203099370003, -0.07835415005683899, 0.04243205860257149, 0.0027718686033040285, 0.015278168953955173, -0.20860883593559265, -0.07405900210142136, 0.03356761485338211, -0.20465123653411865, 0.04692061245441437, 0.03319995850324631, 0.01835964433848858, 0.05232751742005348, -0.004507638979703188, 0.02828996255993843, 0.03576738014817238, -0.00958884134888649, -0.011940695345401764, -0.15416090190410614, -0.048036858439445496, -0.09404265880584717, 0.07287923991680145, -0.1249159574508667, -0.019122611731290817, 0.07011798769235611, 0.1450042724609375, 0.02866736799478531, -0.07353924959897995, 0.05743132904171944, 0.016663622111082077, -0.034659888595342636, -0.05379541963338852, 0.0010664862347766757, -0.028439147397875786, 0.039938509464263916, 0.056703999638557434, -0.17174507677555084, -0.11424598097801208, 0.07542213797569275, 0.1315469741821289, -0.05569375678896904, -0.08396181464195251, -0.05930899828672409, -0.05792195349931717, -0.08542134612798691, -0.07570025324821472, 0.07046735286712646, 0.09560917317867279, 0.045527465641498566, -0.07235150784254074, -0.0487116202712059, 0.002373965224251151, 0.036914750933647156, -0.07130871713161469, 0.11025195568799973, 0.07874659448862076, -0.09138678014278412, 0.11089081317186356, -0.04107297211885452, 0.0990448147058487, 0.09334763139486313, 0.020653078332543373, -0.10656385123729706, 0.011214252561330795, 0.06386048346757889, 0.045960601419210434, 0.07110381871461868, -0.02773822471499443, 0.024185094982385635, 0.08432942628860474, -0.006444585043936968, 0.04458780214190483, -0.07847245782613754, 0.033193763345479965, 0.027043506503105164, 
0.0026390047278255224, 0.022739920765161514, 0.004101514350622892, 0.020387208089232445, 0.090916708111763, 0.029030760750174522, 0.0813765674829483, -0.026539398357272148, -0.05570371448993683, -0.10325295478105545, 0.13734829425811768, -0.08935960382223129, -0.2563333213329315, -0.16946406662464142, -0.057042237371206284, -0.027457792311906815, -0.006854267790913582, 0.05750406160950661, -0.008648041635751724, -0.10756007581949234, -0.11228538304567337, 0.055567264556884766, 0.0388445109128952, -0.14009374380111694, -0.04707157984375954, 0.04382304102182388, -0.014620926231145859, -0.1703573614358902, 0.038900673389434814, 0.047914303839206696, -0.06011169031262398, 0.015112793073058128, 0.05712776258587837, 0.10147155076265335, 0.09376471489667892, 0.08123946189880371, -0.023101024329662323, -0.0163017138838768, 0.16224101185798645, -0.11277975887060165, 0.030374523252248764, 0.0997023731470108, -0.04193912819027901, 0.07524710893630981, 0.14986975491046906, 0.01749131642282009, -0.08286019414663315, 0.05419622361660004, 0.10178828239440918, -0.055200424045324326, -0.2492341697216034, -0.11482060700654984, -0.027842404320836067, 0.030423255637288094, 0.10187539458274841, 0.06357207894325256, 0.011385636404156685, 0.007978622801601887, -0.12402718514204025, -0.02855803817510605, -0.04160849377512932, 0.06687300652265549, 0.030836034566164017, -0.011570739559829235, 0.041813116520643234, -0.049854982644319534, 0.014141845516860485, 0.12726446986198425, 0.04318590834736824, 0.15191689133644104, -0.025731829926371574, 0.18347159028053284, 0.09627550840377808, 0.07022526860237122, -0.030642906203866005, 0.0444219671189785, -0.020660260692238808, 0.06730610877275467, -0.02616998925805092, -0.10061291605234146, -0.0408816784620285, 0.0987318754196167, 0.0414523221552372, -0.07055250555276871, 0.04472477361559868, -0.08321171253919601, 0.047335803508758545, 0.2331487238407135, -0.01621881127357483, -0.11025828868150711, -0.04105588048696518, 0.06602082401514053, 
-0.046139802783727646, -0.09308472275733948, -0.0006847649347037077, 0.0914590135216713, -0.14793464541435242, -0.0035893996246159077, -0.04068206250667572, 0.07588402926921844, -0.13361109793186188, -0.021107815206050873, -0.028432512655854225, 0.037837982177734375, -0.012607945129275322, 0.1075974851846695, -0.1449376344680786, 0.09422055631875992, -0.003814356168732047, 0.01957389898598194, -0.0931747704744339, 0.05436116084456444, -0.020332302898168564, -0.06865833699703217, 0.1248464584350586, -0.004158057738095522, -0.07669863849878311, -0.03983958810567856, -0.10640104115009308, -0.017580294981598854, 0.06355883181095123, -0.10667166858911514, 0.11079051345586777, 0.01987456902861595, -0.024315960705280304, -0.04282037541270256, -0.014509526081383228, -0.08219967037439346, -0.2353975474834442, 0.0983051061630249, -0.1405172497034073, 0.0460357703268528, -0.062457211315631866, -0.04083993658423424, -0.05524320900440216, 0.11888998001813889, -0.11456361413002014, -0.062400802969932556, -0.0978582575917244, -0.02984066680073738, 0.16699880361557007, -0.04434892535209656, 0.056674711406230927, -0.03686809167265892, 0.16597603261470795, -0.03281371295452118, -0.048320919275283813, -0.0031435678247362375, -0.09140176326036453, -0.19117937982082367, -0.051422446966171265, 0.11479786038398743, 0.06994384527206421, 0.01938716135919094, -0.011537597514688969, 0.023830564692616463, 0.00703439163044095, -0.09642579406499863, 0.038374919444322586, 0.12051653116941452, 0.13246698677539825, 0.05521839112043381, -0.020986182615160942, -0.11311469972133636, -0.10928522050380707, -0.1122766062617302, 0.04400862753391266, 0.1703362613916397, -0.05549504980444908, 0.16057844460010529, 0.15465745329856873, -0.10034245252609253, -0.1976422220468521, -0.07708752155303955, 0.003065257566049695, -0.025709182024002075, 0.10821343958377838, -0.20308908820152283, 0.05000479519367218, 0.07164525240659714, -0.03018125891685486, 0.10823383927345276, -0.26350826025009155, 
-0.12744823098182678, 0.04526878148317337, 0.045358624309301376, -0.205783873796463, -0.17684951424598694, -0.10334167629480362, -0.01911681704223156, -0.13997618854045868, 0.13805165886878967, -0.008584789000451565, 0.027016302570700645, -0.024655625224113464, 0.07764225453138351, 0.04810225963592529, -0.0679023489356041, 0.1289013922214508, -0.03503658249974251, 0.034324027597904205, -0.09448797255754471, -0.03512939065694809, -0.03651392459869385, -0.04030267521739006, 0.07801959663629532, 0.015486960299313068, 0.046174563467502594, -0.09660056978464127, -0.029271574690937996, -0.06476367264986038, 0.02577451430261135, -0.06568203866481781, -0.050316501408815384, -0.08063308149576187, 0.08761614561080933, 0.07598896324634552, 0.003067080629989505, 0.046171072870492935, -0.049446068704128265, 0.0346548892557621, 0.2051730901002884, 0.10305964946746826, 0.059767335653305054, -0.09207887947559357, -0.04736987128853798, -0.015901315957307816, 0.007789330091327429, -0.09885181486606598, 0.045341528952121735, 0.07827089726924896, 0.041417717933654785, 0.10236026346683502, -0.025068530812859535, -0.19117149710655212, 0.0006309308228082955, 0.07897413522005081, -0.09945023059844971, -0.19192877411842346, 0.043181516230106354, 0.12864869832992554, -0.13372844457626343, -0.07502331584692001, 0.07971629500389099, 0.027172638103365898, -0.03868221864104271, -0.006601386237889528, 0.08476155251264572, 0.04106758534908295, 0.0861314982175827, 0.013475527055561543, 0.04663800075650215, -0.07090307027101517, 0.10148371011018753, 0.1432226300239563, -0.11239810287952423, 0.014735447242856026, 0.04591899365186691, -0.040140435099601746, -0.0709969624876976, -0.002128034830093384, 0.026620954275131226, 0.02466602437198162, -0.044801704585552216, 0.023317554965615273, -0.03715530037879944, 0.05493255704641342, 0.15287435054779053, -0.006052341777831316, 0.05851146951317787, 0.01976037211716175, 0.0022332570515573025, -0.06369996815919876, 0.09854401648044586, 0.022637560963630676, 
0.03526962921023369, -0.025041865184903145, 0.021330850198864937, 0.009307393804192543, -0.0371759757399559, 0.019994331523776054, -0.0507754348218441, -0.08370129019021988, 0.005745903588831425, -0.1837559938430786, 0.061418671160936356, -0.07892818003892899, 0.002430855529382825, 0.009133892133831978, -0.008401960134506226, 0.008234698325395584, -0.0027760483790189028, -0.07596064358949661, -0.03714955225586891, -0.039324089884757996, 0.1368456333875656, -0.19173842668533325, -0.0002654684940353036, 0.0874641165137291, -0.06885986030101776, 0.0604175440967083, -0.0022671623155474663, -0.011505515314638615, 0.021876735612750053, -0.10083181411027908, -0.002152894390746951, -0.03000549226999283, 0.058799706399440765, 0.013834233395755291, -0.13287636637687683, -0.019527917727828026, -0.0013633394846692681, -0.08359348773956299, -0.006866185925900936, 0.027960268780589104, -0.1446744203567505, 0.08423155546188354, 0.08306436985731125, -0.047863755375146866, -0.0462862066924572, 0.045832619071006775, 0.05312953144311905, -0.004848908167332411, 0.09491223841905594, -0.019572531804442406, 0.028770364820957184, -0.14195723831653595, -0.03267255797982216, 0.006252628285437822, 0.014982262626290321, 0.03391696512699127, 0.019154759123921394, 0.019994603469967842, 0.0032830049749463797, 0.2342493236064911, -0.002570666838437319, 0.006883418187499046, 0.017059411853551865, -0.02121485397219658, -0.03877361863851547, 0.0349058099091053, -0.0041024331003427505, -0.00506183598190546, 0.023558234795928, -0.005323590710759163, -0.03585050255060196, -0.06491553783416748, -0.020249582827091217, 0.0792757123708725, 0.14540418982505798, 0.16754233837127686, -0.030963532626628876, 0.05855550989508629, -0.16705277562141418, -0.04086567834019661, 0.0017121981363743544, -0.025698542594909668, 0.043301891535520554, -0.07453788816928864, 0.07135631144046783, 0.09282521903514862, -0.10086750239133835, 0.15147656202316284, -0.05913246423006058, -0.017070909962058067, -0.03826385736465454, 
-0.16009648144245148, -0.031464360654354095, 0.03264467045664787, -0.00017386833496857435, -0.09446156769990921, 0.1225336417555809, 0.12615036964416504, -0.0008074623765423894, -0.008100440725684166, 0.07194213569164276, -0.07248641550540924, -0.054848361760377884, -0.02854633890092373, 0.005806584842503071, 0.026357222348451614, 0.00024414650397375226, 0.06462305784225464, 0.013872683048248291, 0.04954005777835846, 0.07333651185035706, 0.10724306106567383, 0.03690815344452858, 0.012999920174479485, -0.04229608178138733, -0.059160150587558746, 0.005042239557951689, -0.026121854782104492, -0.05489634349942207, 0.22192732989788055, 0.056744083762168884, 0.014703129418194294, 0.020004235208034515, 0.2036181539297104, -0.015390610322356224, -0.052179429680109024, -0.12827663123607635, 0.16743354499340057, -0.011447048746049404, 0.03061734326183796, 0.0304853655397892, -0.11847441643476486, 0.0154292993247509, 0.16168832778930664, 0.10518709570169449, 0.03048747219145298, 0.009285597130656242, 0.04147200658917427, 0.024622244760394096, -0.033458828926086426, 0.05042611435055733, 0.02989663928747177, 0.24044103920459747, -0.051747698336839676, 0.09326700121164322, -0.0008947487222030759, 0.0069978246465325356, -0.03709092736244202, 0.10283078253269196, -0.04154354706406593, 0.018137648701667786, -0.06892932206392288, 0.0820920467376709, -0.06624086201190948, -0.24691514670848846, -0.012384866364300251, -0.06773769110441208, -0.1308436244726181, -0.012885105796158314, 0.01223996002227068, -0.015409667044878006, 0.05544741824269295, 0.03551768884062767, -0.028646297752857208, 0.17606493830680847, 0.005076659377664328, -0.07321774959564209, -0.07281544059515, 0.06651444733142853, -0.056426212191581726, 0.2740027904510498, 0.0033438794780522585, 0.057313788682222366, 0.08595640957355499, -0.0241086483001709, -0.12882424890995026, 0.0259160827845335, 0.07530760020017624, -0.06036621332168579, 0.044766053557395935, 0.15860220789909363, -0.016107818111777306, 
0.14587602019309998, 0.03727226331830025, -0.009756089188158512, 0.07086408138275146, 0.07962161302566528, 0.0307842455804348, -0.08447130024433136, 0.07328707724809647, -0.0966385155916214, 0.12773014605045319, 0.1138884425163269, -0.011915027163922787, 0.011759699322283268, -0.0485420823097229, 0.06373580545186996, -0.039538297802209854, 0.12553980946540833, -0.0262803602963686, -0.13224799931049347, 0.04054128751158714, 0.007480293978005648, 0.06219018995761871, -0.23822729289531708, -0.04842349514365196, 0.10429301857948303, -0.03858233615756035, 0.0011410913430154324, 0.06738732755184174, 0.045005153864622116, 0.03243834897875786, -0.05623757839202881, -0.1288360208272934, 0.01752377301454544, 0.11620268225669861, -0.07033513486385345, -0.03396325930953026 ]
f7f48b405b2114c07fdef951c3a6b09adba7471c
# Dataset Card for "lmind_nq_v1_doc_qa" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tyzhu/lmind_nq_v1_doc_qa
[ "region:us" ]
2024-01-25T06:55:57+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train_qa", "path": "data/train_qa-*"}, {"split": "train_recite_qa", "path": "data/train_recite_qa-*"}, {"split": "eval_qa", "path": "data/eval_qa-*"}, {"split": "eval_recite_qa", "path": "data/eval_recite_qa-*"}, {"split": "all_docs", "path": "data/all_docs-*"}, {"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}], "dataset_info": {"features": [{"name": "answers", "struct": [{"name": "answer_start", "sequence": "null"}, {"name": "text", "sequence": "string"}]}, {"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}], "splits": [{"name": "train_qa", "num_bytes": 34574, "num_examples": 300}, {"name": "train_recite_qa", "num_bytes": 222533, "num_examples": 300}, {"name": "eval_qa", "num_bytes": 11254, "num_examples": 100}, {"name": "eval_recite_qa", "num_bytes": 73368, "num_examples": 100}, {"name": "all_docs", "num_bytes": 248990, "num_examples": 392}, {"name": "train", "num_bytes": 283564, "num_examples": 692}, {"name": "validation", "num_bytes": 11254, "num_examples": 100}], "download_size": 0, "dataset_size": 885537}}
2024-01-26T07:39:34+00:00
[]
[]
TAGS #region-us
# Dataset Card for "lmind_nq_v1_doc_qa" More Information needed
[ "# Dataset Card for \"lmind_nq_v1_doc_qa\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"lmind_nq_v1_doc_qa\"\n\nMore Information needed" ]
[ 6, 22 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"lmind_nq_v1_doc_qa\"\n\nMore Information needed" ]
[ -0.13288673758506775, 0.10527366399765015, -0.0027626946102827787, 0.013833411037921906, 0.026269935071468353, 0.08131967484951019, 0.04634411260485649, 0.12714913487434387, 0.18281406164169312, -0.0044153532944619656, 0.13788679242134094, 0.004943635314702988, 0.04605519026517868, 0.1115557998418808, -0.0029457425698637962, -0.09120140969753265, 0.026235230267047882, 0.03858061507344246, -0.06459300220012665, 0.023722738027572632, 0.011409247294068336, -0.06208372861146927, 0.0621960312128067, -0.06242115795612335, -0.16631022095680237, 0.07104575634002686, -0.021121833473443985, -0.045147836208343506, 0.06828927993774414, -0.04351632297039032, 0.09900368750095367, 0.07220478355884552, 0.052405472844839096, -0.21583741903305054, 0.03338887542486191, -0.04258585721254349, -0.11948443949222565, 0.03985650837421417, 0.014059147797524929, -0.061179205775260925, -0.09868644177913666, -0.03566166013479233, -0.045011524111032486, 0.017789509147405624, -0.12092497944831848, -0.17843204736709595, -0.10012634098529816, 0.015660937875509262, -0.03636804223060608, -0.033128850162029266, 0.0758356899023056, 0.13861513137817383, -0.1359655261039734, 0.08464030176401138, 0.14664101600646973, -0.19540449976921082, 0.06346309930086136, 0.15786190330982208, -0.017030756920576096, 0.07984904944896698, -0.010971412062644958, 0.07696730643510818, 0.05627240985631943, -0.06822160631418228, -0.052890073508024216, -0.0399613231420517, -0.13175161182880402, 0.1453075110912323, -0.04352913051843643, -0.06627031415700912, 0.2981433570384979, 0.039200179278850555, 0.03010261058807373, 0.11075640469789505, -0.01286320947110653, -0.17335399985313416, 0.014852846041321754, 0.06149529665708542, 0.022914160043001175, -0.016779262572526932, 0.09300442039966583, 0.023266630247235298, -0.07867511361837387, -0.15261244773864746, -0.1790175437927246, -0.0051153916865587234, -0.02104882150888443, 0.1428375244140625, -0.17762982845306396, -0.002566670998930931, -0.1708535999059677, 
-0.02607976645231247, -0.04514925926923752, -0.08180423080921173, -0.041311364620923996, 0.008175481110811234, 0.0062933750450611115, 0.054926566779613495, 0.17239677906036377, -0.010380752384662628, 0.03413235768675804, 0.010887367650866508, -0.0730423703789711, 0.0928606390953064, 0.14335636794567108, 0.01558978483080864, -0.04493888095021248, 0.09894807636737823, 0.034347519278526306, -0.060211457312107086, -0.0996219664812088, -0.05409605801105499, -0.11958518624305725, -0.025765713304281235, -0.056547924876213074, 0.14090433716773987, -0.056281641125679016, -0.07000835239887238, 0.002834862098097801, -0.01674886979162693, 0.1301482915878296, -0.08143610507249832, -0.01631223037838936, 0.04191046580672264, -0.044961072504520416, 0.005708066746592522, -0.04278001934289932, -0.01106230914592743, 0.03307124972343445, -0.05985623225569725, -0.06929560005664825, -0.015942271798849106, -0.003254121169447899, -0.03173728659749031, 0.0529966838657856, -0.07585201412439346, 0.09741385281085968, -0.14783088862895966, -0.1558237373828888, 0.049581337720155716, -0.006169924512505531, -0.01804523356258869, 0.1519618034362793, 0.029655102640390396, 0.050882741808891296, -0.04677104204893112, -0.03598078712821007, 0.08445285260677338, -0.08984813094139099, 0.07152363657951355, -0.018270453438162804, 0.11979348212480545, -0.2126249372959137, 0.027023261412978172, -0.04462480545043945, 0.05197136849164963, 0.06858831644058228, 0.042503248900175095, -0.1499713808298111, 0.00016649998724460602, -0.0765402764081955, -0.024730946868658066, -0.06711869686841965, -0.010249563492834568, -0.005132306832820177, 0.0698072612285614, -0.19082197546958923, -0.03281928226351738, 0.19490602612495422, -0.0585600882768631, -0.24110513925552368, 0.009773042052984238, -0.013904533348977566, -0.00818703044205904, -0.00859593041241169, 0.3712450861930847, 0.16034698486328125, -0.19573354721069336, -0.047614797949790955, 0.15111412107944489, -0.13271629810333252, -0.2703760266304016, 
0.06757907569408417, 0.035399578511714935, -0.10193459689617157, 0.02097894623875618, 0.04964354634284973, 0.002744280267506838, -0.08788982033729553, -0.11332923918962479, -0.03564377874135971, -0.16403302550315857, -0.011254264041781425, -0.0502704456448555, 0.06274154782295227, 0.04125308245420456, 0.1606525182723999, -0.03171787038445473, 0.07631714642047882, 0.026626233011484146, -0.02233455330133438, -0.030819498002529144, 0.14489209651947021, -0.11292493343353271, 0.02442803420126438, -0.12952812016010284, -0.1417713165283203, 0.00789191946387291, -0.02216992899775505, 0.020311500877141953, 0.06097026541829109, 0.053051941096782684, -0.013202344998717308, 0.01669328659772873, 0.014667905867099762, 0.028042074292898178, 0.07159334421157837, -0.001675475388765335, 0.008075964637100697, 0.06438814103603363, -0.1009717583656311, -0.15235315263271332, -0.00591856986284256, -0.06764014065265656, 0.09340756386518478, -0.03425527364015579, 0.03198463097214699, 0.045639410614967346, 0.06788435578346252, 0.01710350438952446, 0.049633823335170746, 0.007004693150520325, 0.03458511456847191, -0.07272780686616898, -0.029155567288398743, 0.03839483857154846, 0.01478024572134018, 0.22528287768363953, 0.09131623804569244, -0.044790372252464294, 0.07730025053024292, -0.20059379935264587, -0.02476140297949314, -0.008200956508517265, -0.055105749517679214, 0.009556367993354797, -0.07829032838344574, -0.047905776649713516, 0.04787802696228027, -0.06961548328399658, 0.03192558139562607, 0.049250125885009766, -0.0014132026117295027, -0.10952278226613998, 0.07912740111351013, 0.18857711553573608, -0.14482122659683228, 0.07114057242870331, 0.2512415647506714, 0.13761380314826965, 0.11168374121189117, -0.07393282651901245, -0.09639270603656769, -0.022372271865606308, -0.022190377116203308, -0.06286510080099106, 0.17096540331840515, -0.04237717017531395, 0.020627232268452644, 0.09128222614526749, 0.04571332037448883, 0.0801868885755539, -0.09281032532453537, -0.12859882414340973, 
-0.03213603049516678, -0.053887344896793365, -0.29601526260375977, 0.07468999922275543, 0.006381156854331493, 0.11141927540302277, -0.001971712801605463, 0.03533775731921196, 0.07837404310703278, -0.021953575313091278, -0.05506991222500801, 0.14560246467590332, -0.16095027327537537, -0.2231294810771942, -0.008574888110160828, -0.10354369878768921, 0.0289948470890522, -0.046713292598724365, -0.03448125720024109, -0.16969433426856995, -0.033471863716840744, 0.02281416952610016, -0.09232082962989807, -0.10734958946704865, 0.01216103509068489, -0.017977066338062286, 0.03401865065097809, -0.03456960618495941, -0.11615435779094696, 0.0018084903713315725, -0.05157317966222763, 0.06543776392936707, 0.15715035796165466, -0.12989477813243866, 0.13630791008472443, 0.06586331129074097, -0.05130402743816376, 0.11200017482042313, -0.040544480085372925, 0.2541787922382355, 0.0010468969121575356, -0.01870153099298477, 0.07108145952224731, 0.04035115987062454, 0.0073729706928133965, 0.08307647705078125, 0.06664131581783295, -0.13271306455135345, -0.012618277221918106, -0.03174348920583725, -0.15874549746513367, -0.23436351120471954, -0.09443315863609314, -0.04502996429800987, 0.08780847489833832, 0.026002218946814537, 0.08041682839393616, -0.054791003465652466, 0.035489894449710846, 0.153323233127594, 0.010634360834956169, -0.20280584692955017, -0.06262224167585373, -0.018097493797540665, 0.00034714117646217346, -0.028002765029668808, -0.12154155969619751, 0.053682420402765274, 0.12607380747795105, 0.3372894525527954, 0.23480218648910522, 0.048221684992313385, 0.11567376554012299, 0.08314220607280731, 0.22611960768699646, 0.14556169509887695, 0.11372121423482895, 0.04760725796222687, -0.03857181593775749, 0.041008345782756805, -0.014394138008356094, -0.027992114424705505, -0.008788497187197208, 0.109098419547081, -0.1455124020576477, -0.023104052990674973, -0.033854614943265915, 0.03042709454894066, -0.12289582937955856, 0.04369209706783295, -0.06215792894363403, 
0.07930241525173187, -0.010556299239397049, 0.1587131917476654, -0.061844632029533386, 0.11447155475616455, 0.041272327303886414, -0.029712526127696037, -0.02659095637500286, 0.05715290457010269, 0.08063503354787827, -0.027780143544077873, 0.011357268318533897, -0.08958224207162857, -0.11157157272100449, -0.009959138929843903, 0.13306546211242676, -0.22534222900867462, 0.29315638542175293, 0.05776382237672806, -0.07406206429004669, -0.06497747451066971, -0.10158991068601608, -0.04039482772350311, 0.09589479118585587, 0.195698082447052, 0.059723857790231705, -0.10395753383636475, -0.2050335705280304, -0.08371932804584503, 0.030795658007264137, 0.09893655776977539, 0.08274389058351517, -0.09238851070404053, 0.0648537129163742, -0.00812344066798687, -0.06327029317617416, -0.056540410965681076, -0.06934577226638794, -0.064277783036232, 0.013243972323834896, 0.022860586643218994, -0.0226377472281456, 0.013100909069180489, 0.01278003491461277, -0.190362811088562, -0.029612889513373375, 0.005904847756028175, -0.04744662716984749, -0.09540597349405289, 0.023145530372858047, 0.11956775933504105, -0.06264372169971466, -0.0032483916729688644, -0.0034751184284687042, -0.04584658518433571, -0.0405702143907547, -0.20595517754554749, 0.053395435214042664, -0.09444987773895264, 0.08210761845111847, -0.0726248100399971, 0.12289191037416458, -0.036250289529561996, 0.04177064076066017, 0.012581216171383858, -0.0034109302796423435, -0.07558086514472961, -0.09561637789011002, 0.2164270430803299, -0.1433667242527008, 0.055254340171813965, 0.14463964104652405, -0.07288055866956711, 0.05863845348358154, 0.0746622160077095, -0.02112463489174843, 0.22738130390644073, 0.17422053217887878, -0.08846056461334229, 0.20882506668567657, 0.1668263077735901, -0.04175985977053642, -0.2363259494304657, 0.007434004917740822, -0.09654410928487778, -0.02456090971827507, 0.050262950360774994, -0.19969145953655243, 0.09577775746583939, 0.14168748259544373, -0.010047479532659054, 0.23369601368904114, 
-0.11790932714939117, -0.017331821843981743, 0.18051692843437195, 0.07409696280956268, 0.27250465750694275, -0.13263380527496338, -0.0559210330247879, -0.005874731112271547, -0.18690019845962524, 0.21196332573890686, -0.08869816362857819, 0.04256986081600189, -0.05559398606419563, 0.12582963705062866, 0.0001428730320185423, -0.057604480534791946, 0.1740005910396576, 0.07889040559530258, 0.06809069216251373, -0.04053060710430145, -0.15486720204353333, 0.05364074558019638, -0.048598580062389374, 0.03387579321861267, 0.07581962645053864, 0.10116036236286163, -0.15625624358654022, 0.020489061251282692, -0.042250484228134155, -0.0032609226182103157, 0.040915973484516144, -0.09694887697696686, -0.09123919904232025, -0.00007420685142278671, -0.05087650567293167, -0.037909407168626785, 0.06607353687286377, -0.04530594125390053, -0.04760920628905296, 0.11649513244628906, -0.014987768605351448, -0.06619426608085632, -0.015738174319267273, -0.04910191521048546, -0.08580812811851501, 0.09931748360395432, -0.22959664463996887, 0.05533595755696297, 0.16534002125263214, 0.007778896484524012, 0.040184177458286285, 0.03853493928909302, 0.012869349680840969, 0.0442003533244133, 0.11992128938436508, -0.09395653009414673, -0.07436972856521606, 0.018735354766249657, -0.06532801687717438, 0.032939061522483826, 0.044374652206897736, -0.000605108798481524, 0.02726317010819912, -0.007340220734477043, -0.03353773429989815, 0.03712955862283707, -0.06510532647371292, 0.05723973736166954, 0.19324997067451477, 0.06142116338014603, -0.1604350209236145, 0.13705416023731232, 0.0189085491001606, -0.0633123591542244, 0.008666129782795906, -0.004317363724112511, -0.10172861814498901, -0.10466514527797699, -0.02318456768989563, 0.17305241525173187, -0.1204354465007782, -0.08475364744663239, 0.020282480865716934, -0.05977355316281319, 0.043161116540431976, 0.21573752164840698, -0.015098532661795616, 0.024629278108477592, 0.03694868087768555, -0.06257747113704681, 0.005792559124529362, 
0.02392727881669998, -0.20197340846061707, 0.001358475536108017, -0.05938758701086044, -0.1519598662853241, -0.09475293755531311, 0.3303859233856201, -0.04945547133684158, -0.06907876580953598, -0.12289469689130783, 0.07084879279136658, -0.24568498134613037, 0.0706588476896286, 0.032754525542259216, 0.02076726406812668, -0.022792799398303032, 0.002778880298137665, -0.018137818202376366, 0.020842306315898895, -0.12889045476913452, 0.09322279691696167, 0.032668497413396835, 0.04823923856019974, -0.0801854133605957, -0.026690687984228134, 0.05766555666923523, 0.08390060067176819, 0.13066600263118744, 0.09599579870700836, 0.0030536302365362644, 0.1371157020330429, -0.060498401522636414, -0.10280507802963257, 0.062416739761829376, 0.09625368565320969, 0.08554255962371826, 0.008337908424437046, 0.0006940874736756086, 0.028623100370168686, -0.11784784495830536, 0.04276437312364578, -0.010089831426739693, -0.04479922354221344, -0.03650590404868126, -0.1394886076450348, -0.02671630308032036, -0.024867255240678787, -0.05632689967751503, 0.1073281541466713, 0.026525523513555527, -0.05811184644699097, 0.05588628724217415, 0.022962696850299835, -0.03399180620908737, -0.07179267704486847, 0.04006585478782654, -0.11790098249912262, -0.0941639393568039, 0.031192533671855927, 0.04311764985322952, -0.0837506353855133, 0.43100133538246155, -0.07528705149888992, -0.14840665459632874, -0.03616926074028015, 0.11602503061294556, -0.08256828784942627, -0.013278811238706112, 0.33860576152801514, 0.07663647085428238, -0.009939276613295078, -0.05644020438194275, 0.06628038734197617, -0.03460783511400223, 0.21035730838775635, 0.1309383511543274, 0.018952472135424614, 0.0841783732175827, -0.04648680239915848, 0.07967103272676468, -0.08775512874126434, 0.06566651165485382, -0.08550712466239929, -0.010326549410820007, -0.012012312188744545, 0.03825651481747627, -0.13802188634872437, 0.09922458231449127, -0.04145769774913788, 0.06008930504322052, -0.03480720520019531, -0.12342773377895355, 
-0.09766336530447006, -0.08937660604715347, -0.04759930819272995, -0.05609963834285736, 0.038976818323135376, -0.10022048652172089, 0.02120797149837017, 0.08610603213310242, 0.06905166804790497, 0.05212652310729027, 0.09823112189769745, -0.06452063471078873, 0.06101161986589432, -0.008473744615912437, -0.011390642262995243, -0.04134012758731842, -0.02555103600025177, 0.018852777779102325, 0.06892552226781845, -0.09959200024604797, -0.05489759519696236, 0.02565375156700611, 0.04133821278810501, -0.015673251822590828, -0.1693786382675171, -0.04347473382949829, -0.07819575071334839, 0.06597867608070374, -0.09081762284040451, 0.10339947044849396, 0.047388218343257904, 0.017337404191493988, 0.0589960478246212, 0.07491803914308548, 0.06496896594762802, 0.007029881700873375, -0.03182606399059296, -0.08532457053661346, -0.06161380559206009, 0.058694157749414444, -0.007145704235881567, -0.037576742470264435, -0.025236885994672775, 0.28069859743118286, 0.25523388385772705, -0.09482723474502563, 0.020998600870370865, 0.012485071085393429, 0.05002967268228531, 0.07331694662570953, 0.17022469639778137, 0.062071483582258224, 0.11111277341842651, -0.05183134973049164, -0.11692363023757935, -0.04624138027429581, -0.02846745401620865, -0.0459749698638916, 0.07362405210733414, 0.07970142364501953, -0.0707111805677414, -0.1150544211268425, 0.08338578790426254, -0.20764270424842834, 0.13345956802368164, 0.04608470946550369, -0.0791287049651146, -0.08457422256469727, -0.0694771483540535, -0.0063370899297297, 0.017585599794983864, -0.07704085111618042, -0.1175527572631836, -0.002429673448204994, 0.03438663110136986, -0.02306024730205536, -0.30019140243530273, -0.11446057260036469, 0.03383399173617363, 0.1805456280708313, 0.0313551127910614, 0.03227777034044266, 0.13585683703422546, -0.023185405880212784, 0.038127683103084564, -0.09244804084300995, 0.12075360119342804, 0.028932340443134308, -0.01770356297492981, -0.09758952260017395, 0.0076311491429805756, -0.01961025409400463, 
-0.08130057156085968, 0.04195556044578552, 0.03840799629688263, -0.04177263379096985, 0.12144555151462555, -0.013279443606734276, -0.015089485794305801, 0.04509550333023071, -0.045908503234386444, 0.12048172950744629, 0.013169880956411362, -0.008461653254926205, 0.004260885529220104, -0.03853238746523857, 0.07193128019571304, 0.03425564616918564, -0.11824388802051544, -0.06786617636680603, 0.056961145251989365, -0.0261533260345459, 0.11422723531723022, -0.02961432747542858, -0.09710939228534698, 0.015191560611128807, -0.09433931857347488, 0.04975716024637222, -0.006441897712647915, 0.11632181704044342, 0.17021803557872772, 0.08520002663135529, -0.026151413097977638, 0.01450594887137413, 0.06565022468566895, -0.0029114792123436928, -0.05648328363895416, -0.11039431393146515 ]
7f75ed8bd27e73cba95df84f7f488e317785aea7
# Dataset Card for "relabeled_alpacafarm_pythiasft_20K_preference_data" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Asap7772/relabeled_alpacafarm_pythiasft_20K_preference_data
[ "region:us" ]
2024-01-25T07:36:59+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "output", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "alpaca_text", "dtype": "string"}, {"name": "prompt", "dtype": "string"}, {"name": "alpaca_prompt", "dtype": "string"}, {"name": "y_ref", "dtype": "string"}, {"name": "y_1", "dtype": "string"}, {"name": "y_2", "dtype": "string"}, {"name": "y_w", "dtype": "string"}, {"name": "y_w_alpaca", "dtype": "string"}, {"name": "y_l", "dtype": "string"}, {"name": "y_l_alpaca", "dtype": "string"}, {"name": "y_w_score", "dtype": "float64"}, {"name": "y_l_score", "dtype": "float64"}, {"name": "score_diff", "dtype": "float64"}], "splits": [{"name": "train", "num_bytes": 177945579, "num_examples": 19000}, {"name": "test", "num_bytes": 9378616, "num_examples": 1000}], "download_size": 86095242, "dataset_size": 187324195}}
2024-01-25T07:37:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for "relabeled_alpacafarm_pythiasft_20K_preference_data" More Information needed
[ "# Dataset Card for \"relabeled_alpacafarm_pythiasft_20K_preference_data\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"relabeled_alpacafarm_pythiasft_20K_preference_data\"\n\nMore Information needed" ]
[ 6, 31 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"relabeled_alpacafarm_pythiasft_20K_preference_data\"\n\nMore Information needed" ]
[ -0.07265808433294296, 0.1419757753610611, -0.0037601818330585957, 0.027583319693803787, 0.12989242374897003, 0.04112614691257477, 0.01355245616286993, 0.10007704794406891, 0.10728488117456436, -0.01714872010052204, 0.051806915551424026, 0.13475516438484192, 0.04892417788505554, 0.19214895367622375, -0.06677887588739395, -0.0944991260766983, 0.03076024539768696, 0.06067301705479622, 0.09212779253721237, 0.09054146707057953, 0.03504035621881485, -0.08305829018354416, 0.07961001992225647, -0.0792006328701973, -0.19892720878124237, 0.10790752619504929, -0.00716834980994463, -0.1166415885090828, 0.06869661808013916, -0.04215598106384277, 0.19697482883930206, -0.01427704282104969, 0.0021777625661343336, -0.11659466475248337, 0.030094778165221214, -0.021640246734023094, -0.07405372709035873, 0.0654148980975151, 0.08935213088989258, -0.12580879032611847, 0.06687451899051666, -0.007139786146581173, -0.0026733491104096174, -0.021184638142585754, -0.15676124393939972, -0.18850646913051605, -0.11359640210866928, 0.027887659147381783, -0.0009847070323303342, -0.017298849299550056, 0.0637037605047226, 0.10193152725696564, -0.133845254778862, 0.07499022036790848, 0.19095510244369507, -0.0243668295443058, 0.03922370448708534, 0.08205197751522064, -0.03663095459342003, 0.049631327390670776, 0.009611769579350948, 0.04137531667947769, 0.04285712540149689, -0.010835513472557068, 0.0013962984085083008, -0.045159824192523956, -0.21820834279060364, 0.06855830550193787, -0.04618760198354721, -0.08323178440332413, 0.2556874752044678, 0.02912813238799572, 0.006741215009242296, 0.03373933210968971, -0.07054135948419571, 0.02093510329723358, 0.0006904543843120337, 0.028603993356227875, 0.04215409234166145, -0.04449800029397011, 0.11643665283918381, 0.03460442274808884, -0.10829201340675354, -0.028797071427106857, -0.1887444406747818, 0.16217327117919922, -0.03353811055421829, 0.13954932987689972, -0.11563508957624435, 0.020541882142424583, -0.07375306636095047, -0.043119315057992935, 
0.03529012203216553, -0.06882718205451965, -0.0026166329625993967, -0.06920474767684937, -0.023633768782019615, 0.017871640622615814, 0.06076936051249504, 0.06057596206665039, 0.045948874205350876, 0.0009980163304135203, -0.04517245292663574, 0.051135748624801636, 0.12323653697967529, -0.09035585820674896, -0.08215942978858948, 0.07267257571220398, 0.027547065168619156, -0.043044429272413254, -0.004888805560767651, -0.04202176257967949, -0.054912932217121124, -0.051791489124298096, 0.043801043182611465, 0.0802793949842453, -0.04974251985549927, -0.06098811328411102, -0.11885342746973038, -0.08499933779239655, 0.13744212687015533, -0.1363980770111084, -0.028788762167096138, 0.009446601383388042, -0.13413527607917786, 0.1025928258895874, 0.012118728831410408, -0.031805120408535004, 0.00545708229765296, 0.09663289040327072, -0.07890389114618301, 0.0068237329833209515, -0.04443180188536644, -0.04467596486210823, 0.07789366692304611, -0.1109805479645729, 0.07131106406450272, -0.10970557481050491, -0.25892317295074463, -0.025299159809947014, 0.041076600551605225, -0.004936275538057089, 0.08239461481571198, 0.08583976328372955, 0.06879707425832748, -0.05737656354904175, -0.05701490119099617, 0.08957726508378983, -0.07830542325973511, 0.10012342035770416, -0.04644318297505379, 0.09738633781671524, -0.04807448387145996, 0.06562560051679611, -0.07639141380786896, -0.019270075485110283, -0.09540581703186035, 0.01563466712832451, -0.06971000134944916, 0.20635029673576355, -0.11951372027397156, -0.06048545613884926, -0.02497551590204239, -0.016089515760540962, -0.05419584736227989, 0.047978516668081284, -0.14942997694015503, -0.0771772712469101, 0.15935805439949036, -0.09748456627130508, -0.0810086652636528, -0.0520077683031559, -0.059372495859861374, -0.020388804376125336, 0.11619587987661362, 0.22395552694797516, 0.0817028060555458, -0.06813652813434601, -0.0014775489689782262, 0.0175727978348732, -0.15632005035877228, -0.26784080266952515, 0.057029951363801956, 
0.03295425698161125, -0.15168185532093048, 0.05995290353894234, 0.08756016939878464, 0.01975002884864807, -0.020038018003106117, -0.08943575620651245, -0.004547689110040665, -0.11787505447864532, -0.005517854355275631, 0.026006242260336876, 0.024619590491056442, 0.017200665548443794, 0.13607969880104065, 0.02553560584783554, 0.07807424664497375, 0.03551376610994339, -0.0420684851706028, -0.002874837489798665, 0.15877893567085266, -0.007199175655841827, -0.005008561071008444, -0.18052798509597778, -0.036080099642276764, -0.006411850452423096, -0.10546265542507172, -0.05928083881735802, 0.005093167535960674, 0.016505056992173195, -0.029746398329734802, 0.012459498830139637, 0.09160342812538147, 0.04770420119166374, 0.002046981593593955, 0.02354736439883709, -0.008886203169822693, 0.025762351229786873, -0.06419405341148376, -0.01592184789478779, -0.011261636391282082, -0.06048688665032387, 0.12214944511651993, 0.0777255967259407, 0.012803754769265652, 0.01031588390469551, 0.08442134410142899, 0.007292682770639658, 0.03291783481836319, -0.024855848401784897, 0.048456788063049316, -0.08882862329483032, 0.028365835547447205, -0.04320881888270378, -0.0416833758354187, 0.2612558901309967, 0.12013754993677139, -0.004721394274383783, -0.03464340791106224, -0.12521106004714966, -0.02528274990618229, -0.014225090853869915, -0.03316023573279381, 0.09652069956064224, -0.05217524245381355, -0.000164024080731906, 0.11625627428293228, -0.07656962424516678, 0.028414525091648102, 0.01938708871603012, -0.007706174626946449, -0.05922182649374008, 0.0481436587870121, 0.052437201142311096, -0.2993161678314209, 0.09439599514007568, 0.1440185159444809, 0.11601779609918594, 0.08076637238264084, -0.04167593643069267, -0.060388170182704926, 0.010752849280834198, -0.02167324349284172, -0.07416483014822006, 0.06880954653024673, 0.09179779142141342, 0.009959636256098747, 0.10189195722341537, -0.08235704153776169, 0.047918856143951416, -0.085238017141819, -0.061001427471637726, 
-0.01245884783565998, -0.0045076943933963776, -0.09907583147287369, 0.0178237147629261, 0.04266507923603058, 0.10765548795461655, -0.0376577191054821, -0.019706670194864273, 0.10483062267303467, 0.007532088551670313, 0.003748215502128005, 0.18865719437599182, -0.13410347700119019, -0.2910822033882141, -0.029368655756115913, -0.10040353238582611, 0.02564345672726631, 0.011613838374614716, -0.02149447239935398, -0.043714091181755066, -0.08295375853776932, 0.0799044668674469, -0.0787433385848999, -0.061072755604982376, -0.047424014657735825, -0.0637112408876419, -0.0043425909243524075, -0.03180635720491409, -0.10578122735023499, 0.010728641413152218, -0.04966142773628235, 0.0621199905872345, 0.1880323439836502, -0.060945067554712296, 0.050906702876091, 0.0774579867720604, -0.04688243567943573, 0.03356267139315605, 0.021429361775517464, 0.07407321780920029, -0.009505894966423512, 0.016924452036619186, 0.1041901484131813, 0.03448832780122757, 0.06433872878551483, 0.11089300364255905, 0.0642467588186264, -0.11539380252361298, -0.000016815778508316725, -0.0038611888885498047, -0.1245507001876831, -0.2963825762271881, -0.15732374787330627, -0.0036385045386850834, -0.05412273108959198, 0.1417108178138733, 0.04667457193136215, 0.04525700956583023, 0.12277340143918991, 0.056302621960639954, 0.023990701884031296, -0.18277814984321594, -0.034780144691467285, 0.1539696455001831, -0.007123455870896578, 0.03609549626708031, -0.10145597904920578, -0.011521757580339909, 0.13944853842258453, 0.1446867287158966, 0.26798006892204285, 0.06498817354440689, 0.01776694320142269, 0.0300788264721632, 0.08982565999031067, 0.10284870117902756, 0.16335614025592804, 0.11626406759023666, -0.012423824518918991, -0.02120697870850563, 0.021136542782187462, -0.07502584904432297, 0.03259378671646118, 0.05904465913772583, -0.1467076689004898, 0.021862391382455826, -0.1626303493976593, 0.00901260320097208, 0.00036829564487561584, -0.031587615609169006, -0.2378879338502884, 0.09196767210960388, 
-0.0023167601320892572, 0.05673491209745407, -0.09135141968727112, 0.033857353031635284, -0.05807526409626007, -0.07226154953241348, 0.1378621757030487, -0.04317794740200043, 0.07093821465969086, -0.09329541027545929, -0.0005713970167562366, -0.044128622859716415, -0.05500810965895653, 0.0056994217447936535, 0.14432039856910706, -0.22145245969295502, 0.2598440647125244, 0.05037609115242958, -0.11013472825288773, -0.11311863362789154, -0.09675496816635132, -0.014995269477367401, 0.01751132495701313, 0.13365530967712402, 0.045425813645124435, -0.16111956536769867, -0.16384486854076385, -0.12827113270759583, -0.03853458911180496, 0.018448740243911743, 0.04291318356990814, -0.06715531647205353, 0.07049503922462463, 0.01796346716582775, -0.08493513613939285, -0.09013369679450989, -0.05186818912625313, -0.07898030430078506, 0.04573911055922508, 0.04559047520160675, -0.06241900846362114, 0.000696450297255069, -0.06172626093029976, -0.037889428436756134, 0.07704797387123108, 0.04468553885817528, -0.10799302160739899, -0.12982501089572906, 0.0406162291765213, 0.1154102012515068, -0.05451356619596481, 0.05800078436732292, -0.021432939916849136, -0.03822853043675423, -0.05164499580860138, -0.19340035319328308, 0.0864696353673935, -0.041598670184612274, 0.09869340807199478, -0.041470400989055634, 0.1097974106669426, 0.03326500952243805, -0.010779977776110172, 0.03320527821779251, 0.016353663057088852, -0.05365198850631714, -0.05710950866341591, 0.10121876746416092, 0.02813628315925598, 0.15090937912464142, 0.21906791627407074, -0.02866675704717636, -0.015053557232022285, 0.037780337035655975, 0.011877888813614845, 0.1654728502035141, 0.02812272123992443, -0.06316749006509781, 0.19800999760627747, 0.14688320457935333, -0.045971762388944626, -0.2088325321674347, -0.022967100143432617, -0.040289804339408875, 0.003831561654806137, 0.03154834732413292, -0.3560532331466675, 0.15249116718769073, 0.13767439126968384, -0.04475189000368118, 0.1809336245059967, -0.061838679015636444, 
-0.02477283589541912, 0.2532278299331665, 0.05358908325433731, 0.3868982493877411, -0.12383554130792618, -0.045713745057582855, -0.11203137040138245, -0.05909798666834831, 0.2434951215982437, -0.07821767032146454, 0.057421572506427765, -0.11778361350297928, 0.24943570792675018, 0.035281047224998474, -0.04927128180861473, 0.15092253684997559, 0.13310715556144714, 0.1422109752893448, -0.10973420739173889, -0.06238975003361702, 0.06805332750082016, -0.02980952151119709, 0.0596635527908802, 0.06996563822031021, 0.06940416991710663, -0.12033630907535553, -0.010769227519631386, -0.04720088839530945, 0.10599859803915024, 0.06340808421373367, -0.022412659600377083, -0.08469458669424057, 0.012420558370649815, -0.061084531247615814, -0.04005412012338638, -0.0009119219612330198, 0.03377557173371315, 0.08105038106441498, 0.12308676540851593, -0.05624838545918465, -0.0676376223564148, 0.0005937035311944783, -0.07730529457330704, -0.04268881306052208, 0.08399923890829086, -0.1986623853445053, 0.02395772375166416, 0.17588204145431519, 0.04081820696592331, 0.03164663910865784, -0.015519749373197556, -0.07312427461147308, -0.0611066110432148, 0.18532007932662964, -0.17173166573047638, 0.04303904250264168, 0.03293979540467262, 0.009037344716489315, 0.004485896788537502, -0.03872718662023544, 0.014253424480557442, 0.06926825642585754, -0.06462179869413376, -0.023363884538412094, 0.018922513350844383, 0.0003724850539583713, 0.10733623057603836, 0.12133598327636719, 0.025847196578979492, -0.18135857582092285, 0.1663597673177719, 0.1006704792380333, -0.11066289991140366, -0.03021518513560295, -0.049237001687288284, -0.09732579439878464, -0.10703256726264954, -0.03601435571908951, 0.12944863736629486, -0.144145667552948, -0.09722840040922165, 0.005252228118479252, -0.05442124977707863, 0.02435312047600746, 0.11398017406463623, 0.038671065121889114, 0.10383908450603485, -0.003819983219727874, -0.10957871377468109, 0.011979260481894016, -0.05668622627854347, -0.024662965908646584, 
-0.0027001521084457636, -0.11166949570178986, -0.09490139037370682, -0.048651039600372314, 0.17308661341667175, -0.038629282265901566, -0.03420170769095421, -0.04943260923027992, 0.0285226721316576, -0.14145934581756592, 0.05967523530125618, 0.0012620593188330531, 0.022834688425064087, -0.006457949057221413, 0.019824137911200523, 0.01201769057661295, -0.008374977856874466, -0.17133085429668427, 0.10926275700330734, 0.05712958052754402, 0.07278674095869064, -0.11029793322086334, -0.09423193335533142, 0.08543358743190765, 0.08337901532649994, 0.07521367818117142, 0.1679525524377823, 0.009455402381718159, 0.10020902752876282, 0.0012761354446411133, -0.16304467618465424, 0.14238449931144714, 0.07763040065765381, 0.027354633435606956, -0.0486031174659729, 0.031845953315496445, 0.08383899927139282, -0.01260432694107294, 0.05422443151473999, -0.10544756054878235, -0.1529548466205597, -0.07860290259122849, -0.07495969533920288, -0.0934741273522377, -0.041169799864292145, -0.04711431264877319, 0.1101241260766983, 0.040588781237602234, 0.024012833833694458, 0.05398844555020332, 0.00960939098149538, -0.041487276554107666, -0.05683491751551628, -0.07089775800704956, -0.1767972707748413, -0.03898166865110397, 0.04408988729119301, 0.036160990595817566, -0.008821964263916016, 0.40732723474502563, 0.07747570425271988, -0.06647084653377533, -0.03834793344140053, 0.15574416518211365, -0.050200920552015305, -0.04206893965601921, 0.2695296108722687, 0.049030572175979614, -0.03558986261487007, 0.07673084735870361, 0.06962790340185165, 0.0022284621372818947, 0.08459850400686264, 0.06522209942340851, 0.15462681651115417, 0.012404768727719784, -0.08136942237615585, -0.03313592076301575, -0.10477007925510406, 0.08684709668159485, -0.04676656424999237, -0.0242611113935709, 0.03961934521794319, 0.08736616373062134, -0.08219385147094727, 0.03700409457087517, -0.056809403002262115, 0.07746224105358124, 0.009120786562561989, -0.07220060378313065, -0.1331777274608612, -0.07895725965499878, 
-0.08071988075971603, -0.046540647745132446, 0.030141623690724373, -0.05623979866504669, -0.007478940766304731, 0.19811655580997467, 0.007649965584278107, -0.014827100560069084, 0.018258091062307358, -0.030422545969486237, -0.04752723500132561, 0.08713550120592117, 0.008764066733419895, -0.10025496780872345, 0.013602621853351593, -0.03621459752321243, 0.02997508831322193, -0.0077598183415830135, -0.027273301035165787, -0.03747406229376793, 0.07403068244457245, 0.0723603293299675, -0.1429263949394226, -0.027299698442220688, -0.0785832405090332, 0.013922082260251045, -0.048173073679208755, -0.006593849044293165, -0.006335960701107979, 0.01705986075103283, 0.026406515389680862, 0.16155372560024261, 0.037925392389297485, 0.04351655766367912, 0.01868724822998047, 0.11802391707897186, 0.03145439177751541, 0.005174271762371063, 0.023788761347532272, -0.019483234733343124, 0.020674891769886017, 0.12631171941757202, 0.16730301082134247, -0.013208648189902306, 0.030991850420832634, -0.07144057750701904, 0.04314587265253067, 0.08558358997106552, 0.10210427641868591, 0.038468822836875916, -0.004548314027488232, -0.02157805673778057, -0.07640047371387482, -0.018979482352733612, -0.018195854499936104, -0.11464666575193405, 0.0456417016685009, 0.07960692048072815, -0.08083862066268921, -0.11784806847572327, 0.10443121194839478, -0.0887378379702568, 0.08939015865325928, 0.04593885317444801, -0.09045146405696869, -0.1348046213388443, -0.09602393209934235, -0.020358361303806305, 0.11515812575817108, -0.029666787013411522, -0.08256053924560547, -0.019661225378513336, 0.08578445017337799, 0.015362300910055637, -0.42073380947113037, -0.16241073608398438, 0.07933008670806885, 0.032926369458436966, 0.0005355920875445008, -0.010845547541975975, 0.13626164197921753, 0.009244650602340698, -0.000011830795301648322, -0.07919618487358093, 0.06790413707494736, -0.02526831068098545, 0.018280306831002235, 0.025212576612830162, 0.06505429744720459, -0.0021703976672142744, -0.19476434588432312, 
0.004733796697109938, -0.02375481277704239, -0.09044253081083298, 0.06930438429117203, 0.025044139474630356, -0.04291210696101189, 0.053071580827236176, -0.03710857406258583, 0.09394092857837677, 0.09271226823329926, -0.01163263525813818, -0.004244829062372446, -0.059668559581041336, 0.05882864445447922, 0.06344769150018692, -0.17661987245082855, -0.05737822875380516, 0.046538881957530975, -0.043724291026592255, -0.05387744680047035, -0.011712562292814255, -0.09657658636569977, 0.05718543007969856, -0.11695443093776703, -0.02704845368862152, -0.07322593778371811, 0.08323346078395844, -0.0057298485189676285, 0.048972878605127335, 0.0038827729877084494, 0.03700314834713936, 0.042552635073661804, 0.01991778239607811, -0.10792818665504456, -0.09621214866638184 ]
2e242dec7c712e911e6849b2ce17257d51de0517
# Dataset Card for "relabeled_alpacafarm_pythiasft_20K_preference_data_maxlength" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Asap7772/relabeled_alpacafarm_pythiasft_20K_preference_data_maxlength
[ "region:us" ]
2024-01-25T07:37:09+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "output", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "alpaca_text", "dtype": "string"}, {"name": "prompt", "dtype": "string"}, {"name": "alpaca_prompt", "dtype": "string"}, {"name": "y_ref", "dtype": "string"}, {"name": "y_1", "dtype": "string"}, {"name": "y_2", "dtype": "string"}, {"name": "y_w", "dtype": "string"}, {"name": "y_w_alpaca", "dtype": "string"}, {"name": "y_l", "dtype": "string"}, {"name": "y_l_alpaca", "dtype": "string"}, {"name": "y_w_score", "dtype": "float64"}, {"name": "y_l_score", "dtype": "float64"}, {"name": "score_diff", "dtype": "float64"}], "splits": [{"name": "train", "num_bytes": 177945579, "num_examples": 19000}, {"name": "test", "num_bytes": 9378616, "num_examples": 1000}], "download_size": 86089134, "dataset_size": 187324195}}
2024-01-30T00:47:59+00:00
[]
[]
TAGS #region-us
# Dataset Card for "relabeled_alpacafarm_pythiasft_20K_preference_data_maxlength" More Information needed
[ "# Dataset Card for \"relabeled_alpacafarm_pythiasft_20K_preference_data_maxlength\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"relabeled_alpacafarm_pythiasft_20K_preference_data_maxlength\"\n\nMore Information needed" ]
[ 6, 35 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"relabeled_alpacafarm_pythiasft_20K_preference_data_maxlength\"\n\nMore Information needed" ]
[ -0.0777340903878212, 0.07193224877119064, -0.0030931681394577026, 0.034304648637771606, 0.10328741371631622, 0.037040386348962784, -0.0091284504160285, 0.10534945875406265, 0.0797068253159523, -0.002504503820091486, 0.0707491934299469, 0.04737633094191551, 0.037427064031362534, 0.21398021280765533, -0.06144474446773529, -0.11761884391307831, 0.017864903435111046, 0.08334764838218689, 0.08268818259239197, 0.09448231756687164, 0.04240933060646057, -0.1272798329591751, 0.08111519366502762, -0.08697264641523361, -0.21370737254619598, 0.09794580936431885, -0.027434518560767174, -0.11983208358287811, 0.053073301911354065, -0.05743313953280449, 0.15773092210292816, -0.027544302865862846, 0.003665069118142128, -0.08566492795944214, 0.02111281268298626, -0.004391611088067293, -0.031573597341775894, 0.07734213024377823, 0.1102178767323494, -0.046261850744485855, 0.08786343783140182, -0.039473846554756165, -0.0370374396443367, -0.00848982110619545, -0.13018570840358734, -0.16181588172912598, -0.09412677586078644, 0.010590557008981705, 0.024875981733202934, 0.010153915733098984, 0.051645368337631226, 0.10059195756912231, -0.13826540112495422, 0.058886367827653885, 0.19052651524543762, -0.08027924597263336, 0.023621734231710434, 0.08150506764650345, 0.007154202554374933, 0.06291459500789642, -0.021067380905151367, 0.023643959313631058, 0.07646146416664124, -0.00542031042277813, 0.0445428304374218, -0.05339555814862251, -0.1648096889257431, 0.08057045191526413, -0.051021378487348557, -0.058532148599624634, 0.254305899143219, 0.04198198765516281, 0.020858028903603554, -0.002807990647852421, -0.08606787025928497, -0.0469428226351738, -0.012428386136889458, 0.061089325696229935, 0.06856951117515564, -0.052007947117090225, 0.054908979684114456, 0.04393910989165306, -0.10063724219799042, -0.030974019318819046, -0.20306357741355896, 0.1013420820236206, -0.034097906202077866, 0.13266773521900177, -0.09104909002780914, -0.014043707400560379, -0.06706546247005463, 
-0.038769084960222244, 0.02666240558028221, -0.06363297998905182, -0.013707969337701797, -0.05096229910850525, -0.03255217522382736, 0.011238044127821922, 0.052095040678977966, 0.01170351356267929, 0.043151818215847015, -0.018022209405899048, 0.008495449088513851, 0.04808412119746208, 0.1252468079328537, -0.09767956286668777, -0.10420957952737808, 0.017268981784582138, 0.007419057656079531, -0.01909647323191166, -0.0038123230915516615, -0.017144102603197098, -0.050637487322092056, -0.05990135297179222, 0.022681940346956253, 0.05872369185090065, -0.049407247453927994, -0.0679369792342186, -0.08292559534311295, -0.0833030417561531, 0.15922537446022034, -0.12517209351062775, -0.027174711227416992, 0.00019444756617303938, -0.13080242276191711, 0.1210998147726059, -0.042868662625551224, -0.06218414381146431, 0.010257796384394169, 0.08881349861621857, -0.09437340497970581, -0.013380147516727448, -0.06322386860847473, -0.059790823608636856, 0.08820139616727829, -0.10458888113498688, 0.0512554794549942, -0.10892074555158615, -0.2300451099872589, -0.000755825312808156, 0.02360990084707737, -0.039838552474975586, 0.04843215271830559, 0.0793466717004776, 0.045350994914770126, -0.03958861529827118, -0.07059338688850403, 0.14434558153152466, -0.08184386789798737, 0.09918607026338577, 0.0027493885718286037, 0.13318932056427002, -0.05436345189809799, 0.046654559671878815, -0.07774436473846436, -0.007048422936350107, -0.08328337967395782, 0.009350510314106941, -0.026492802426218987, 0.19666332006454468, -0.12744328379631042, -0.08909627795219421, -0.0534551776945591, -0.00937630981206894, -0.05583852156996727, 0.015354348346590996, -0.18881212174892426, -0.059324026107788086, 0.15665999054908752, -0.0595475398004055, -0.1037859171628952, -0.016050565987825394, -0.030035503208637238, -0.0569356232881546, 0.0984523594379425, 0.19967414438724518, 0.042698901146650314, -0.05619242414832115, -0.03710911050438881, 0.06851823627948761, -0.13406535983085632, -0.29056644439697266, 
0.07831791043281555, 0.04150574654340744, -0.05061424523591995, 0.05371236801147461, 0.13271108269691467, 0.03988216817378998, -0.019020184874534607, -0.08143055438995361, -0.03014858067035675, -0.14332908391952515, -0.019746020436286926, 0.031906161457300186, 0.022277604788541794, -0.006594968494027853, 0.1312987208366394, 0.05657254904508591, 0.07817874848842621, 0.032360102981328964, -0.02719312533736229, 0.010018766857683659, 0.10869452357292175, -0.06786580383777618, -0.009053654968738556, -0.17183387279510498, -0.00038920773658901453, -0.008762963116168976, -0.049360956996679306, -0.06690715998411179, 0.09587863087654114, 0.015383763238787651, -0.04104816913604736, -0.013626456260681152, 0.10168717801570892, 0.07000192999839783, -0.02412293665111065, -0.011991875246167183, 0.03342403843998909, 0.008897863328456879, -0.033252544701099396, -0.07173854112625122, -0.058876704424619675, -0.06960499286651611, 0.10215561091899872, 0.06472539901733398, 0.008175820112228394, 0.01590866968035698, 0.08635880053043365, -0.007389881182461977, 0.006602615118026733, -0.02844432182610035, 0.042082879692316055, -0.08494284749031067, 0.06156808137893677, -0.026896551251411438, -0.04450111836194992, 0.2764906585216522, 0.11810754239559174, -0.012603214010596275, -0.004841682501137257, -0.09643909335136414, -0.03254319354891777, -0.030131125822663307, -0.060905881226062775, 0.07663889974355698, -0.07323276996612549, 0.006612998899072409, 0.13788047432899475, -0.08627091348171234, 0.015509843826293945, 0.041522782295942307, -0.009534182026982307, -0.03746175020933151, 0.0506930947303772, 0.05372900515794754, -0.3062955141067505, 0.09953878074884415, 0.14692793786525726, 0.10142991691827774, 0.12087929248809814, -0.05263245478272438, -0.10134222358465195, 0.03153885900974274, -0.028401073068380356, -0.09308185428380966, 0.07661543786525726, 0.08058367669582367, -0.000005624691766570322, 0.1033625677227974, -0.0686744898557663, 0.03817962855100632, -0.07700874656438828, 
-0.05402598902583122, -0.0137691181153059, -0.009177697822451591, -0.10922861099243164, 0.00381075544282794, 0.045569874346256256, 0.09522347897291183, -0.012096860446035862, -0.028486991301178932, 0.095476433634758, 0.00809404719620943, 0.0256696417927742, 0.17954063415527344, -0.10328849405050278, -0.30624523758888245, -0.017040658742189407, -0.12373831123113632, 0.016368579119443893, 0.008405016735196114, -0.03816754370927811, -0.06567525118589401, -0.08163783699274063, 0.07110404223203659, -0.057753466069698334, -0.06592505425214767, -0.002312813187018037, -0.05591154098510742, 0.014414435252547264, 0.0009440905414521694, -0.09971408545970917, 0.02878011390566826, -0.06330996006727219, 0.06051725149154663, 0.15490196645259857, -0.0473531037569046, 0.05823956057429314, 0.08493269234895706, -0.05765904486179352, 0.022395340725779533, 0.02378256618976593, 0.07279161363840103, -0.013034801930189133, 0.014203931204974651, 0.143595352768898, 0.10156724601984024, 0.038523606956005096, 0.09382569789886475, 0.06723152846097946, -0.10340803861618042, -0.004861185792833567, 0.012997348792850971, -0.13618159294128418, -0.24864742159843445, -0.15591110289096832, -0.050130609422922134, -0.0009688297868706286, 0.191132053732872, 0.04251524806022644, -0.03409689664840698, 0.09747214615345001, 0.044083673506975174, 0.06294310837984085, -0.19561631977558136, -0.04149673879146576, 0.12386738508939743, 0.004876400344073772, 0.06018206104636192, -0.10116519033908844, -0.026975568383932114, 0.1441592127084732, 0.15382349491119385, 0.27383455634117126, 0.021890025585889816, 0.058433499187231064, 0.007818033918738365, 0.07011864334344864, 0.09681858122348785, 0.1708633154630661, 0.06161452457308769, -0.048362795263528824, -0.009043860249221325, 0.017456378787755966, -0.03273020684719086, 0.022171778604388237, 0.05730345472693443, -0.12430941313505173, 0.02433202601969242, -0.11179392039775848, 0.023270945996046066, 0.025782685726881027, 0.020479047670960426, -0.253568172454834, 
0.08014976978302002, -0.01580706238746643, 0.030507991090416908, -0.11404462158679962, 0.05542860925197601, 0.019094077870249748, -0.03435462713241577, 0.1178385391831398, -0.05060286074876785, 0.07540662586688995, -0.06427423655986786, 0.024736084043979645, -0.01864316500723362, -0.017557663843035698, 0.021065352484583855, 0.14206281304359436, -0.19527338445186615, 0.22060534358024597, 0.04143066331744194, -0.09081261605024338, -0.11023491621017456, -0.08982893824577332, -0.03324497118592262, -0.011593922041356564, 0.1059933751821518, 0.03326854109764099, -0.09073370695114136, -0.13552144169807434, -0.14376279711723328, -0.04218291863799095, 0.027368389070034027, 0.07589489966630936, -0.04824664816260338, 0.06134263798594475, -0.005873437505215406, -0.07025459408760071, -0.08980239927768707, -0.007724658586084843, -0.0940610021352768, 0.03228817135095596, 0.09153623878955841, -0.1309402734041214, 0.002494162879884243, -0.04271111637353897, -0.0559864267706871, 0.1431542932987213, 0.09051235020160675, -0.10950525104999542, -0.10666083544492722, 0.05995391681790352, 0.16235731542110443, -0.0367802157998085, 0.07564408332109451, -0.0023734343703836203, 0.00902273878455162, -0.03577544540166855, -0.1680392175912857, 0.10338899493217468, -0.040232691913843155, 0.08279440551996231, -0.03276645764708519, 0.09752547740936279, -0.025574086233973503, 0.007635276298969984, 0.02320869453251362, 0.014015383087098598, -0.09038614481687546, -0.04544232040643692, 0.04674024507403374, -0.03510334715247154, 0.17445485293865204, 0.17745691537857056, -0.0011602945160120726, 0.01797603629529476, 0.08606114238500595, 0.005371013656258583, 0.16307707130908966, 0.0015388806350529194, -0.055474717170000076, 0.1881019026041031, 0.09991907328367233, 0.018605470657348633, -0.21381299197673798, -0.023006074130535126, -0.04404181241989136, 0.002928543835878372, -0.007217355538159609, -0.2873958647251129, 0.16276811063289642, 0.15835727751255035, -0.030603846535086632, 0.2180624008178711, 
-0.10099264979362488, -0.022367076948285103, 0.19083671271800995, 0.024438751861453056, 0.3734257221221924, -0.09983591735363007, -0.030738282948732376, -0.1361793577671051, -0.044188011437654495, 0.20464524626731873, -0.10441438853740692, 0.07343874126672745, -0.13251075148582458, 0.19753821194171906, 0.021346068009734154, -0.06632696837186813, 0.12761107087135315, 0.11663216352462769, 0.11897476762533188, -0.08942591398954391, -0.012009109370410442, 0.10473156720399857, -0.027608226984739304, 0.09548793733119965, -0.03147796168923378, 0.042672090232372284, -0.1322837769985199, -0.02426115982234478, -0.032564926892519, 0.04500611126422882, 0.07458782196044922, -0.021275050938129425, -0.08759056776762009, 0.0049132551066577435, -0.058318134397268295, -0.024914894253015518, -0.003114972496405244, 0.035033900290727615, 0.02035643719136715, 0.15589119493961334, -0.026265282183885574, -0.13792599737644196, -0.06461656093597412, -0.07768862694501877, -0.03784305229783058, 0.08027629554271698, -0.242056742310524, 0.02989375777542591, 0.15103472769260406, 0.03973521292209625, -0.014912718906998634, 0.001721099717542529, -0.04942714795470238, -0.047592535614967346, 0.19434618949890137, -0.14364618062973022, 0.03643950819969177, 0.0317484512925148, 0.029400242492556572, -0.05283749848604202, -0.02950914204120636, -0.013318387791514397, 0.049296412616968155, -0.047590795904397964, -0.02088874951004982, 0.022101325914263725, -0.010286052711308002, 0.1579751968383789, 0.09018845111131668, 0.056295670568943024, -0.19907014071941376, 0.1495705395936966, 0.08237133920192719, -0.08831353485584259, -0.05158811807632446, -0.002995122689753771, -0.09933727234601974, -0.10946489870548248, 0.007626562844961882, 0.04374969005584717, -0.11100345104932785, -0.07936287671327591, -0.005234942305833101, -0.08249408006668091, 0.05479472503066063, 0.05281394347548485, 0.04202541708946228, 0.09445390850305557, -0.01098691113293171, -0.11323028057813644, -0.03454699367284775, 
-0.040460098534822464, -0.025736751034855843, 0.040291305631399155, -0.09145842492580414, -0.0438644215464592, -0.06578286737203598, 0.1623704731464386, -0.04334188625216484, 0.009064200334250927, -0.02838750183582306, 0.018317610025405884, -0.1515953242778778, 0.033324409276247025, 0.02188500389456749, 0.01298728957772255, -0.0338304229080677, 0.06939263641834259, -0.011635925620794296, -0.02251007780432701, -0.15296179056167603, 0.06625218689441681, 0.05161745846271515, 0.03975711762905121, -0.09073935449123383, -0.08001963794231415, 0.056407488882541656, 0.06995973736047745, 0.06900962442159653, 0.11128131300210953, 0.014117971062660217, 0.06293663382530212, 0.061112601310014725, -0.1783360093832016, 0.15403397381305695, 0.09264697879552841, 0.051357608288526535, 0.0171365849673748, 0.05005493015050888, 0.1025196835398674, 0.025383777916431427, 0.05805042013525963, -0.16287142038345337, -0.14785921573638916, -0.07690462470054626, -0.12969477474689484, -0.06614518910646439, -0.024946551769971848, -0.06171710789203644, 0.08713815361261368, 0.06059735640883446, 0.04378417506814003, 0.039456065744161606, -0.020257731899619102, -0.07727321982383728, -0.04316753149032593, -0.08547334372997284, -0.18211299180984497, -0.02396411821246147, 0.04376768320798874, 0.0575251430273056, -0.014074601233005524, 0.397185355424881, 0.08397511392831802, -0.06865086406469345, -0.034685321152210236, 0.15979020297527313, -0.05115247145295143, -0.047459956258535385, 0.3588288724422455, 0.06562606990337372, -0.03312363475561142, 0.07206787168979645, 0.08839105069637299, 0.05636097490787506, 0.13994517922401428, 0.07089179754257202, 0.18452689051628113, -0.017201503738760948, -0.052215881645679474, -0.0516028068959713, -0.13508020341396332, 0.04900484159588814, 0.014936060644686222, -0.04328494518995285, 0.06116838380694389, 0.08078810572624207, -0.06536310911178589, 0.04991656541824341, -0.06899731606245041, 0.028414607048034668, -0.005109365098178387, -0.0544104166328907, 
-0.11262555420398712, -0.03789058327674866, -0.08148880302906036, -0.054784808307886124, 0.023120982572436333, -0.060817182064056396, 0.001474230783060193, 0.18528145551681519, -0.0033312265295535326, -0.0113495709374547, 0.03223874792456627, -0.034362584352493286, -0.03855574503540993, 0.09702948480844498, 0.021467477083206177, -0.0864705741405487, 0.05068099871277809, -0.0172126442193985, 0.03675573691725731, -0.046076808124780655, -0.01676439866423607, -0.05376739054918289, 0.07625380903482437, 0.07380269467830658, -0.10643009841442108, -0.04017927870154381, -0.09076821058988571, 0.024588976055383682, -0.012625612318515778, 0.021839028224349022, 0.013167132623493671, 0.0014733436983078718, 0.0007141873356886208, 0.17135247588157654, 0.0053246160969138145, 0.0050379433669149876, 0.027916600927710533, 0.10605749487876892, 0.04812215268611908, 0.018515240401029587, -0.0018874595407396555, -0.016234252601861954, -0.009297765791416168, 0.15371893346309662, 0.17687878012657166, -0.05206811800599098, 0.016073821112513542, -0.06689807772636414, 0.03216695412993431, 0.055347759276628494, 0.12140697985887527, 0.07503841072320938, 0.02866625227034092, -0.02823394164443016, -0.09331010282039642, -0.009144254960119724, -0.004237355664372444, -0.13818812370300293, 0.04756622388958931, 0.07257958501577377, -0.06977985054254532, -0.10313384979963303, 0.046568065881729126, -0.040457069873809814, 0.16367797553539276, 0.0438663624227047, -0.09657607972621918, -0.11816676706075668, -0.07142570614814758, 0.009840011596679688, 0.10164981335401535, -0.011168018914759159, -0.09402517974376678, 0.012755010277032852, 0.032648514956235886, -0.005407458636909723, -0.41490355134010315, -0.13991203904151917, 0.07437118887901306, 0.05214500054717064, 0.010710745118558407, -0.00860639102756977, 0.10511759668588638, 0.030149471014738083, -0.021438468247652054, -0.10045144706964493, 0.0813852995634079, -0.047704558819532394, -0.014948120340704918, 0.05964237451553345, 0.08685518056154251, 
-0.01592119038105011, -0.19858470559120178, 0.01730453595519066, -0.019544852897524834, -0.12476012110710144, 0.06906209886074066, 0.0624508373439312, -0.035913433879613876, 0.025263208895921707, -0.01034972071647644, 0.10095930844545364, 0.09109479188919067, -0.023699240759015083, -0.01831415668129921, -0.04670087993144989, 0.08837781846523285, 0.0476900115609169, -0.15171386301517487, -0.055450692772865295, 0.020593486726284027, -0.08396042883396149, -0.034807465970516205, -0.018750619143247604, -0.15237919986248016, 0.051379814743995667, -0.1070159375667572, -0.025376606732606888, -0.08343042433261871, 0.08063007146120071, 0.02191794477403164, 0.04797816649079323, -0.004110331647098064, 0.05461663752794266, 0.030339447781443596, 0.00009876622061710805, -0.11396623402833939, -0.09928194433450699 ]
72a43b85b8cb31a3800e21500ec9b8e8fc56b5ea
# Dataset Card for "relabeled_alpacafarm_pythiasft_20K_preference_data_minlength" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Asap7772/relabeled_alpacafarm_pythiasft_20K_preference_data_minlength
[ "region:us" ]
2024-01-25T07:37:20+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "output", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "alpaca_text", "dtype": "string"}, {"name": "prompt", "dtype": "string"}, {"name": "alpaca_prompt", "dtype": "string"}, {"name": "y_ref", "dtype": "string"}, {"name": "y_1", "dtype": "string"}, {"name": "y_2", "dtype": "string"}, {"name": "y_w", "dtype": "string"}, {"name": "y_w_alpaca", "dtype": "string"}, {"name": "y_l", "dtype": "string"}, {"name": "y_l_alpaca", "dtype": "string"}, {"name": "y_w_score", "dtype": "float64"}, {"name": "y_l_score", "dtype": "float64"}, {"name": "score_diff", "dtype": "float64"}], "splits": [{"name": "train", "num_bytes": 177945579, "num_examples": 19000}, {"name": "test", "num_bytes": 9378616, "num_examples": 1000}], "download_size": 86093422, "dataset_size": 187324195}}
2024-01-30T00:48:20+00:00
[]
[]
TAGS #region-us
# Dataset Card for "relabeled_alpacafarm_pythiasft_20K_preference_data_minlength" More Information needed
[ "# Dataset Card for \"relabeled_alpacafarm_pythiasft_20K_preference_data_minlength\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"relabeled_alpacafarm_pythiasft_20K_preference_data_minlength\"\n\nMore Information needed" ]
[ 6, 35 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"relabeled_alpacafarm_pythiasft_20K_preference_data_minlength\"\n\nMore Information needed" ]
[ -0.07703772932291031, 0.0956612080335617, -0.0034096711315214634, 0.03416358307003975, 0.09777448326349258, 0.02441837079823017, 0.009722944349050522, 0.10566063970327377, 0.03804260119795799, -0.0022725253365933895, 0.08129212260246277, 0.060817841440439224, 0.04468970745801926, 0.1951429545879364, -0.08092112094163895, -0.13966986536979675, 0.022069910541176796, 0.08502513915300369, 0.09954999387264252, 0.09203255921602249, 0.03969426825642586, -0.11373984813690186, 0.08020147681236267, -0.08620169013738632, -0.1974981278181076, 0.0837782472372055, -0.02460673823952675, -0.1311059147119522, 0.05381540581583977, -0.049281224608421326, 0.16870367527008057, -0.024073243141174316, 0.008617492392659187, -0.08946378529071808, 0.02519283816218376, -0.010465072467923164, -0.029663361608982086, 0.08330083638429642, 0.09885779768228531, -0.05034150555729866, 0.07196959853172302, -0.03673644736409187, -0.02199634537100792, -0.016666946932673454, -0.13705435395240784, -0.16164851188659668, -0.09733117371797562, 0.013538593426346779, 0.015884241089224815, 0.014049394987523556, 0.048475231975317, 0.09885457158088684, -0.14246900379657745, 0.061237700283527374, 0.19481565058231354, -0.041356801986694336, 0.01807042397558689, 0.08397820591926575, -0.011699655093252659, 0.06934341788291931, -0.01738053373992443, 0.03824738413095474, 0.06150614470243454, -0.0037421989254653454, 0.033230479806661606, -0.05639955773949623, -0.1864960938692093, 0.08344243466854095, -0.04888534918427467, -0.07110138982534409, 0.2295234650373459, 0.03825095668435097, 0.025828862562775612, 0.0005723105277866125, -0.08274081349372864, -0.03470491245388985, -0.034108612686395645, 0.061985258013010025, 0.06594385951757431, -0.044571734964847565, 0.045885879546403885, 0.032940346747636795, -0.10069993138313293, -0.03354567289352417, -0.21011292934417725, 0.12648575007915497, -0.032557904720306396, 0.1300479769706726, -0.11487410962581635, -0.008391544222831726, -0.06115555390715599, -0.0437481664121151, 
0.025528689846396446, -0.058760546147823334, 0.02418549545109272, -0.04723159968852997, -0.03670655936002731, 0.011154871433973312, 0.058235105127096176, 0.026690255850553513, 0.03378592059016228, -0.007105300668627024, -0.003372764214873314, 0.04482544958591461, 0.10489622503519058, -0.07273649424314499, -0.08682619035243988, 0.024532733485102654, 0.005062275566160679, -0.01312024425715208, 0.004128407686948776, -0.03252735733985901, -0.05716940388083458, -0.0631856918334961, 0.030483247712254524, 0.06106574833393097, -0.04514462128281593, -0.060497865080833435, -0.08123157173395157, -0.07664858549833298, 0.14596083760261536, -0.12844517827033997, -0.020017405971884727, -0.004558734130114317, -0.1317630261182785, 0.12039776146411896, -0.028746001422405243, -0.055915214121341705, 0.00261979503557086, 0.09425823390483856, -0.09793141484260559, -0.008248573169112206, -0.06363092362880707, -0.06582457572221756, 0.08241593092679977, -0.1079145222902298, 0.060006916522979736, -0.11786802858114243, -0.21695579588413239, -0.002406260697171092, 0.018691422417759895, -0.03484170883893967, 0.07166312634944916, 0.05845978483557701, 0.03302152454853058, -0.039208415895700455, -0.06201114505529404, 0.13468021154403687, -0.0773656815290451, 0.09896668046712875, -0.014649811200797558, 0.1371808648109436, -0.06229689344763756, 0.058769211173057556, -0.076405830681324, -0.01145586185157299, -0.07148095965385437, 0.012249305844306946, -0.044432781636714935, 0.18783940374851227, -0.11206548660993576, -0.08238270878791809, -0.0600968673825264, -0.01161767914891243, -0.04696642979979515, 0.017806408926844597, -0.1695496141910553, -0.06752396374940872, 0.16703762114048004, -0.060538843274116516, -0.06799052655696869, -0.023609180003404617, -0.02829000912606716, -0.05797618627548218, 0.10099776834249496, 0.2355203777551651, 0.07276005297899246, -0.05662316828966141, -0.0005126383621245623, 0.07667805999517441, -0.13125117123126984, -0.2815595865249634, 0.06504450738430023, 
0.03978371247649193, -0.07848920673131943, 0.06180592253804207, 0.12711265683174133, 0.03465728089213371, -0.0317191407084465, -0.07965308427810669, -0.017167719081044197, -0.1309407353401184, -0.018835213035345078, 0.03790516033768654, 0.021054865792393684, -0.0043656774796545506, 0.1344810277223587, 0.04347691312432289, 0.08876775950193405, 0.026590565219521523, -0.023600421845912933, 0.004401148296892643, 0.12489792704582214, -0.03198159486055374, -0.011252421885728836, -0.1788983941078186, -0.007096610963344574, -0.011378130875527859, -0.05987302586436272, -0.0606679767370224, 0.07771036028862, 0.010629336349666119, -0.03616797551512718, -0.006816561799496412, 0.10574118047952652, 0.04466550797224045, -0.017075099050998688, -0.01925538294017315, 0.03525036200881004, 0.029629893600940704, -0.04075177386403084, -0.04513993859291077, -0.029046937823295593, -0.06842735409736633, 0.10225781053304672, 0.06144622340798378, 0.00835852138698101, 0.019147489219903946, 0.07938183844089508, -0.002694230293855071, 0.019707299768924713, -0.02550450898706913, 0.04136490076780319, -0.08354341983795166, 0.06810515373945236, -0.03575058653950691, -0.05357050895690918, 0.24701394140720367, 0.12699532508850098, -0.006737031042575836, 0.0018639352638274431, -0.13235437870025635, -0.025905560702085495, -0.020742449909448624, -0.06338751316070557, 0.07477102428674698, -0.06414137780666351, 0.002167284721508622, 0.13855084776878357, -0.0928172767162323, 0.02883368544280529, 0.028053930029273033, -0.029861746355891228, -0.04749542474746704, 0.05851172283291817, 0.04762231931090355, -0.2874269187450409, 0.1043558195233345, 0.14229357242584229, 0.09837142378091812, 0.11587587743997574, -0.05350859463214874, -0.10475067794322968, 0.029298564419150352, -0.03972575441002846, -0.09161970019340515, 0.07661252468824387, 0.07491473853588104, 0.0006740762037225068, 0.1083025187253952, -0.06297444552183151, 0.05010540783405304, -0.09265653789043427, -0.05748923495411873, -0.015505255199968815, 
-0.011025890707969666, -0.1174590215086937, 0.002903502434492111, 0.045831188559532166, 0.09713079780340195, -0.015177435241639614, -0.040150292217731476, 0.0821443647146225, 0.013120735064148903, 0.02727683074772358, 0.17839546501636505, -0.12880000472068787, -0.31228017807006836, -0.034667134284973145, -0.1190347671508789, 0.01828017458319664, 0.0063150920905172825, -0.026567168533802032, -0.06629886478185654, -0.08279740810394287, 0.059466999024152756, -0.04105197265744209, -0.055423423647880554, -0.009140047244727612, -0.038625530898571014, 0.01178900245577097, -0.0272182859480381, -0.10732671618461609, 0.021084634587168694, -0.07343598455190659, 0.09477723389863968, 0.17078448832035065, -0.06620137393474579, 0.04980657994747162, 0.09406692534685135, -0.06031564995646477, 0.02598307654261589, 0.02348707802593708, 0.09052519500255585, -0.004542664159089327, 0.024833736941218376, 0.13541708886623383, 0.09318196773529053, 0.05509716272354126, 0.0954216942191124, 0.06536094844341278, -0.10858713835477829, -0.010864808224141598, 0.01245846226811409, -0.12681737542152405, -0.24613818526268005, -0.1542842835187912, -0.033882953226566315, 0.014298924244940281, 0.16902518272399902, 0.032925281673669815, -0.007758438587188721, 0.10083502531051636, 0.04660310223698616, 0.046365320682525635, -0.17586661875247955, -0.0424826517701149, 0.15774255990982056, -0.015729721635580063, 0.05617290735244751, -0.09047643840312958, -0.02075965888798237, 0.14991673827171326, 0.14323943853378296, 0.27374181151390076, 0.03667605295777321, 0.0401451550424099, 0.005371727980673313, 0.07794778794050217, 0.1001368910074234, 0.1560039222240448, 0.07828578352928162, -0.047712404280900955, -0.01604926399886608, 0.02803894691169262, -0.04652595892548561, 0.014953268691897392, 0.05707894265651703, -0.13331371545791626, 0.028968332335352898, -0.07378672808408737, 0.014406906440854073, 0.022155746817588806, 0.01927683688700199, -0.2594408392906189, 0.07731394469738007, -0.021991398185491562, 
0.02966468594968319, -0.11528424173593521, 0.062172237783670425, 0.01889084465801716, -0.03325093165040016, 0.09618200361728668, -0.05069516599178314, 0.0830434188246727, -0.06603482365608215, 0.023428818210959435, -0.02793845534324646, -0.017957350239157677, 0.015066583640873432, 0.1336604207754135, -0.1958594024181366, 0.24095064401626587, 0.0430002324283123, -0.10055562853813171, -0.1071828082203865, -0.08549550175666809, -0.030028123408555984, 0.01131292711943388, 0.09830440580844879, 0.03272118419408798, -0.09956224262714386, -0.15583260357379913, -0.1679784059524536, -0.04671255871653557, 0.055271562188863754, 0.07332729548215866, -0.04771571606397629, 0.04712740704417229, 0.006581735797226429, -0.06733221560716629, -0.12504874169826508, -0.008316478691995144, -0.09293829649686813, 0.036183424293994904, 0.09125714749097824, -0.12513791024684906, -0.0014500429388135672, -0.03820681571960449, -0.045015547424554825, 0.09706392139196396, 0.10408560186624527, -0.1128050833940506, -0.11565371602773666, 0.06667856872081757, 0.14980864524841309, -0.04467650502920151, 0.07834763079881668, 0.0018457386177033186, -0.0038922231178730726, -0.034401778131723404, -0.16418226063251495, 0.10101928561925888, -0.04051624611020088, 0.08847153931856155, -0.02845633588731289, 0.09725528210401535, -0.02325950190424919, 0.004301209468394518, 0.025765229016542435, 0.01044371910393238, -0.09654964506626129, -0.04764750972390175, 0.04831194505095482, -0.01805906742811203, 0.16398152709007263, 0.16458362340927124, -0.018167391419410706, -0.0021541721653193235, 0.07156643271446228, 0.0107443081215024, 0.16093938052654266, -0.001547476975247264, -0.055349793285131454, 0.1835946887731552, 0.1320672631263733, 0.01892050728201866, -0.2327904850244522, -0.02296837791800499, -0.04344211891293526, -0.000804384530056268, 0.009283292107284069, -0.28689277172088623, 0.1516355723142624, 0.15699507296085358, -0.026232263073325157, 0.18811553716659546, -0.12232460081577301, -0.020426880568265915, 
0.21475709974765778, 0.04143662005662918, 0.3558439016342163, -0.0989515632390976, -0.03919604793190956, -0.11521085351705551, -0.06159619241952896, 0.18353040516376495, -0.10425218939781189, 0.07154799997806549, -0.1205330640077591, 0.22806547582149506, 0.03015156090259552, -0.057130273431539536, 0.14295300841331482, 0.11511991173028946, 0.1349724978208542, -0.09586098045110703, -0.050643421709537506, 0.09609217941761017, -0.029309596866369247, 0.08448216319084167, 0.001101890578866005, 0.061727024614810944, -0.0991031751036644, -0.016494987532496452, -0.03543480113148689, 0.04087035357952118, 0.06895346194505692, -0.027198459953069687, -0.09084436297416687, 0.005888131447136402, -0.06654258072376251, -0.03507542237639427, -0.00029135082149878144, 0.02835995703935623, -0.010825016535818577, 0.15069091320037842, -0.04329798370599747, -0.1312086582183838, -0.024003485217690468, -0.08173677325248718, -0.036740366369485855, 0.07739923149347305, -0.22329258918762207, 0.02345484122633934, 0.1431027352809906, 0.039375972002744675, 0.0006759898387826979, -0.011411416344344616, -0.03706888481974602, -0.05646685138344765, 0.1816745102405548, -0.13701854646205902, 0.024092484265565872, 0.02609705924987793, 0.013432084582746029, -0.03608029708266258, -0.03732137754559517, -0.015290678478777409, 0.06260670721530914, -0.04800358787178993, -0.020298361778259277, 0.03179231658577919, -0.003561768215149641, 0.14324456453323364, 0.10179655253887177, 0.03555161505937576, -0.20203495025634766, 0.15884172916412354, 0.08314213156700134, -0.09782537817955017, -0.05303007364273071, -0.0035525374114513397, -0.11248023062944412, -0.10832979530096054, -0.0065770759247243404, 0.06662525236606598, -0.1237318143248558, -0.08254341781139374, -0.00710288668051362, -0.07567208260297775, 0.05094116926193237, 0.06527388095855713, 0.036967385560274124, 0.09350774437189102, -0.012910937890410423, -0.0927908718585968, -0.024311315268278122, -0.029152048751711845, -0.019572671502828598, 
0.03758738934993744, -0.08212272822856903, -0.03957267850637436, -0.06589668989181519, 0.15927480161190033, -0.03937314450740814, 0.0019038226455450058, -0.0337802954018116, 0.023600103333592415, -0.15761983394622803, 0.04262444004416466, 0.016642408445477486, 0.017261173576116562, -0.021806329488754272, 0.05697403848171234, -0.005911143496632576, -0.017096569761633873, -0.146897554397583, 0.0729224756360054, 0.04163748025894165, 0.0458829440176487, -0.09367459267377853, -0.08741539716720581, 0.06361790001392365, 0.0782553032040596, 0.06620005518198013, 0.12768082320690155, 0.022234683856368065, 0.06446454674005508, 0.05986901745200157, -0.17021243274211884, 0.15149858593940735, 0.09415370225906372, 0.05125204101204872, 0.002530028112232685, 0.03328367695212364, 0.08920800685882568, 0.013996437191963196, 0.057253073900938034, -0.1280777007341385, -0.14681150019168854, -0.06651334464550018, -0.1289052516222, -0.08397743850946426, -0.03380286321043968, -0.05156616121530533, 0.09866423904895782, 0.04610614851117134, 0.035977572202682495, 0.0438849963247776, -0.012329522520303726, -0.062012847512960434, -0.04078172147274017, -0.07502896338701248, -0.18716952204704285, -0.02836284041404724, 0.028005708009004593, 0.05318678170442581, -0.01762554422020912, 0.3614097535610199, 0.10827670991420746, -0.07929377257823944, -0.030960189178586006, 0.16196471452713013, -0.058344222605228424, -0.04570559784770012, 0.33956876397132874, 0.0748668909072876, -0.038199085742235184, 0.06378605961799622, 0.08136164397001266, 0.0510530099272728, 0.12472780048847198, 0.07342174649238586, 0.18970590829849243, -0.009082701988518238, -0.043509889394044876, -0.055072274059057236, -0.10415681451559067, 0.00821004994213581, 0.02278054505586624, -0.044427741318941116, 0.04671031981706619, 0.07204832136631012, -0.07122020423412323, 0.05529366433620453, -0.07521224766969681, 0.04548817127943039, 0.016593463718891144, -0.06351298093795776, -0.11205711215734482, -0.06221304461359978, 
-0.08686163276433945, -0.05784735083580017, 0.014383460395038128, -0.06963028758764267, 0.016811557114124298, 0.183528333902359, -0.008235083892941475, -0.00453985808417201, 0.04996205493807793, -0.06300786137580872, -0.05292050540447235, 0.09526747465133667, 0.017748944461345673, -0.0845935195684433, 0.01731657236814499, -0.03383439779281616, 0.03508159890770912, -0.03260573372244835, -0.013670616783201694, -0.05080104246735573, 0.07900261133909225, 0.06117499619722366, -0.12903599441051483, -0.041586630046367645, -0.08752532303333282, 0.015201719477772713, -0.024092160165309906, 0.006659471895545721, 0.009975687600672245, 0.0050064874812960625, 0.01288766972720623, 0.18672193586826324, 0.0005209767259657383, -0.006612994242459536, 0.01732289418578148, 0.10353575646877289, 0.042182669043540955, 0.03236139193177223, 0.004173885565251112, -0.02312367968261242, -0.005250006448477507, 0.1707596629858017, 0.1604139357805252, -0.04914562404155731, 0.0175703763961792, -0.05034347623586655, 0.03645925968885422, 0.051232825964689255, 0.1119125708937645, 0.07277990132570267, 0.0049376399256289005, -0.022952763363718987, -0.12007725983858109, -0.012931414879858494, -0.023277876898646355, -0.1474672257900238, 0.04898279160261154, 0.0759672150015831, -0.08290902525186539, -0.1167578473687172, 0.05178574100136757, -0.04170237481594086, 0.1593073010444641, 0.04260120540857315, -0.09032738953828812, -0.11620067059993744, -0.06549684703350067, 0.010425866581499577, 0.09287939220666885, -0.0032045734114944935, -0.08219011127948761, 0.004454798996448517, 0.06852292269468307, -0.005161971319466829, -0.40121597051620483, -0.1326766312122345, 0.08779768645763397, 0.03392457589507103, 0.008032373152673244, -0.010366360656917095, 0.09182141721248627, 0.02647874690592289, -0.010746678337454796, -0.08317999541759491, 0.09177659451961517, -0.026550300419330597, 0.007990483194589615, 0.04761093854904175, 0.07477495819330215, -0.017961718142032623, -0.19103866815567017, 0.023689528927206993, 
-0.03278316184878349, -0.11968644708395004, 0.06532445549964905, 0.049597062170505524, -0.024660466238856316, 0.020016970112919807, -0.01339039672166109, 0.10660196095705032, 0.08379030972719193, -0.014692738652229309, -0.022243529558181763, -0.0366944745182991, 0.07177852839231491, 0.04082547873258591, -0.14537683129310608, -0.051689647138118744, 0.03657936304807663, -0.09320977330207825, -0.04788562282919884, -0.010908728465437889, -0.1387360692024231, 0.05172973871231079, -0.10526689887046814, -0.023205310106277466, -0.07724300771951675, 0.08506593108177185, 0.035799540579319, 0.033434271812438965, -0.007971140556037426, 0.05589903146028519, 0.03617079183459282, 0.016098327934741974, -0.10678081214427948, -0.0951731875538826 ]
a020f6f062cb748e7eb7189d4a5816038d5980dd
# Dataset Card for "skewexp" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Asap7772/skewexp
[ "region:us" ]
2024-01-25T07:37:33+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "output", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "alpaca_text", "dtype": "string"}, {"name": "prompt", "dtype": "string"}, {"name": "alpaca_prompt", "dtype": "string"}, {"name": "y_ref", "dtype": "string"}, {"name": "y_1", "dtype": "string"}, {"name": "y_2", "dtype": "string"}, {"name": "y_w", "dtype": "string"}, {"name": "y_w_alpaca", "dtype": "string"}, {"name": "y_l", "dtype": "string"}, {"name": "y_l_alpaca", "dtype": "string"}, {"name": "y_w_score", "dtype": "float64"}, {"name": "y_l_score", "dtype": "float64"}, {"name": "score_diff", "dtype": "float64"}], "splits": [{"name": "train", "num_bytes": 62156813, "num_examples": 19000}, {"name": "test", "num_bytes": 3233542, "num_examples": 1000}], "download_size": 31195534, "dataset_size": 65390355}}
2024-01-25T07:37:37+00:00
[]
[]
TAGS #region-us
# Dataset Card for "skewexp" More Information needed
[ "# Dataset Card for \"skewexp\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"skewexp\"\n\nMore Information needed" ]
[ 6, 13 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"skewexp\"\n\nMore Information needed" ]
[ -0.10529416799545288, 0.12602664530277252, -0.004620356950908899, -0.014222544617950916, 0.08732981234788895, 0.07853326201438904, 0.12764208018779755, 0.046025101095438004, 0.2012997567653656, -0.01506737619638443, 0.13612054288387299, 0.11773622781038284, 0.07408498227596283, 0.263623982667923, -0.04775071144104004, -0.04778938740491867, 0.08597332239151001, -0.005235497839748859, -0.04526155814528465, 0.034867849200963974, 0.0040275040082633495, -0.09833243489265442, 0.06227649748325348, -0.11146850883960724, -0.21319204568862915, 0.04467165842652321, -0.026982272043824196, -0.06642170250415802, 0.06736989319324493, -0.03596210852265358, 0.16784213483333588, 0.00597124919295311, 0.004986970219761133, -0.14264477789402008, 0.021618390455842018, 0.004630156327039003, -0.036436740309000015, 0.0501791350543499, 0.006189627572894096, -0.0804261863231659, -0.13216164708137512, -0.020001349970698357, 0.030232952907681465, -0.03511884808540344, -0.1148126944899559, -0.17470301687717438, -0.12317747622728348, -0.015217984095215797, 0.04950682446360588, -0.038743969053030014, 0.07671951502561569, 0.1413908153772354, -0.10927974432706833, 0.056354790925979614, 0.14546000957489014, -0.06334307044744492, 0.04044469818472862, 0.12751434743404388, -0.09213269501924515, 0.07698240131139755, -0.05494637042284012, 0.07900681346654892, 0.06411112844944, -0.03380599617958069, -0.056471679359674454, -0.039360739290714264, -0.1798049956560135, 0.10197918117046356, -0.039497166872024536, -0.08209336549043655, 0.2636535167694092, -0.008029486984014511, -0.017088282853364944, 0.015606826171278954, -0.005918834358453751, -0.011499162763357162, 0.01152272243052721, 0.05688215419650078, 0.008499923162162304, -0.014994009397923946, 0.0689394399523735, -0.026383545249700546, -0.09188015758991241, -0.12759490311145782, -0.17470206320285797, 0.16310812532901764, -0.022544901818037033, 0.1868305653333664, -0.1722448170185089, -0.03001590631902218, -0.010126832872629166, -0.02049863152205944, 
-0.028835980221629143, -0.10976169258356094, -0.05568690970540047, -0.01174130942672491, 0.027218103408813477, 0.02378147840499878, 0.11590515077114105, 0.12654347717761993, 0.11914996802806854, 0.061706677079200745, -0.06801856309175491, 0.10831800848245621, 0.20891211926937103, -0.06964992731809616, 0.019476793706417084, 0.011502577923238277, -0.0221206396818161, -0.16114625334739685, -0.020803963765501976, -0.12534773349761963, -0.09177025407552719, -0.0028851337265223265, -0.08198539167642593, 0.15503919124603271, 0.004095596261322498, -0.11299621313810349, -0.03506770730018616, -0.038136400282382965, 0.12583430111408234, -0.1049279049038887, 0.01775406487286091, -0.016156286001205444, -0.026538921520113945, 0.05498738959431648, -0.102324940264225, -0.0038061607629060745, 0.05345200374722481, 0.08805152773857117, -0.08126670867204666, 0.01769074611365795, -0.008639628998935223, -0.03543207421898842, 0.07680050283670425, -0.20633839070796967, 0.0912143886089325, -0.09831034392118454, -0.2649991512298584, 0.022537516430020332, -0.0019867396913468838, -0.04830283671617508, 0.1269940882921219, 0.005893481895327568, 0.0831277146935463, -0.010052567347884178, -0.08309118449687958, 0.10035014152526855, -0.0820317342877388, 0.034181591123342514, -0.09219890832901001, 0.08380817621946335, -0.18734446167945862, 0.009993059560656548, -0.09668446332216263, 0.0007658665417693555, -0.10525650531053543, 0.03496244549751282, -0.1857227385044098, 0.10982482135295868, -0.05938008427619934, -0.006436181254684925, -0.11622244119644165, 0.026512358337640762, -0.012095319107174873, 0.09160695225000381, -0.16810579597949982, -0.046740759164094925, 0.18281424045562744, -0.17733848094940186, -0.18695111572742462, 0.0040723099373281, -0.05281643196940422, 0.053882796317338943, 0.006176881026476622, 0.35594412684440613, 0.04000784829258919, -0.16031086444854736, 0.018872061744332314, 0.12722520530223846, -0.19973112642765045, -0.26152169704437256, 0.06452449411153793, 
-0.06731574982404709, -0.1190081462264061, 0.023294728249311447, 0.06310651451349258, 0.0384674035012722, -0.06819195300340652, -0.07109632343053818, 0.021370885893702507, -0.12967857718467712, 0.05368956923484802, 0.009461428970098495, 0.03858325257897377, -0.01887514814734459, 0.19274181127548218, 0.06533972173929214, 0.040905531495809555, 0.021424464881420135, -0.03079673834145069, -0.01946059614419937, 0.10960256308317184, -0.08637215942144394, -0.012710772454738617, -0.14639930427074432, -0.20496568083763123, -0.013169268146157265, -0.01618729531764984, 0.012504112906754017, 0.018053261563181877, 0.07849095016717911, -0.030191734433174133, 0.05226052179932594, 0.06678919494152069, 0.11868609488010406, 0.06878410279750824, -0.02400057390332222, 0.0021464203018695116, 0.012762579135596752, -0.0712868869304657, -0.06995687633752823, -0.0012874476378783584, -0.04256477206945419, 0.09206881374120712, 0.0349758081138134, 0.025095755234360695, -0.021251557394862175, 0.05430791527032852, 0.021907972171902657, 0.03747989982366562, -0.05109551176428795, 0.032138995826244354, -0.027941597625613213, 0.01702052541077137, 0.06503531336784363, -0.04376586154103279, 0.19884459674358368, 0.13114701211452484, -0.06589115411043167, 0.020216796547174454, -0.13826189935207367, 0.03398871049284935, -0.034690309315919876, -0.08649887889623642, 0.012722790241241455, -0.07074465602636337, -0.01713619753718376, 0.009324326179921627, -0.035564206540584564, 0.021496037021279335, 0.0378413163125515, -0.013557424768805504, -0.09742464870214462, 0.015879126265645027, 0.11652226001024246, -0.21586419641971588, 0.1511107087135315, 0.2117525190114975, 0.1357697993516922, 0.15892885625362396, -0.0839855968952179, -0.07003311812877655, 0.01259890478104353, -0.06184227019548416, -0.07185368984937668, 0.191358283162117, -0.04877341166138649, -0.012635506689548492, 0.10735246539115906, 0.0161820650100708, 0.06461762636899948, -0.07528141140937805, -0.09977331012487411, -0.016093391925096512, 
-0.007020492106676102, -0.1337938755750656, 0.04554128646850586, 0.009073776192963123, 0.05134838446974754, 0.019721608608961105, 0.012318942695856094, 0.12812985479831696, -0.009788136929273605, -0.020401975139975548, 0.10278820991516113, -0.185127854347229, -0.22524520754814148, -0.08685094118118286, -0.09192058444023132, 0.042381662875413895, -0.003898355644196272, -0.051050104200839996, -0.10349109768867493, -0.049664758145809174, 0.06535639613866806, -0.05486178398132324, -0.16201263666152954, -0.004597800783813, 0.04339069500565529, -0.012715782970190048, -0.1286601424217224, -0.09501886367797852, 0.0090181278064847, -0.03661694377660751, 0.12821103632450104, 0.14261697232723236, -0.06733540445566177, 0.15362659096717834, 0.08971676975488663, -0.008423137478530407, 0.08240227401256561, -0.05546678602695465, 0.09298355877399445, -0.046633824706077576, -0.04720480367541313, 0.08345048129558563, -0.03732025623321533, 0.031054796651005745, 0.024657176807522774, 0.040786195546388626, -0.12183546274900436, -0.02309134230017662, -0.027116429060697556, -0.21044407784938812, -0.2177477478981018, -0.12193727493286133, -0.04896410182118416, 0.11238545924425125, 0.09003529697656631, 0.05250215530395508, 0.02409188076853752, 0.1070079505443573, 0.14098022878170013, -0.040100570768117905, -0.11534354835748672, -0.012590374797582626, -0.008225949481129646, 0.004792432300746441, -0.009146740660071373, -0.15032579004764557, 0.022802069783210754, 0.13459904491901398, 0.1659233272075653, 0.23837067186832428, 0.08536765724420547, 0.0742243304848671, 0.03611481562256813, 0.12116686999797821, 0.10349498689174652, 0.18384875357151031, 0.11366116255521774, -0.021215926855802536, 0.014959419146180153, 0.032031167298555374, -0.10942906141281128, 0.003605242120102048, 0.11085847020149231, -0.20196732878684998, 0.03757457807660103, -0.1318548321723938, 0.029922930523753166, -0.08903395384550095, 0.047707680612802505, -0.10250049829483032, 0.04227965325117111, 0.011617541313171387, 
0.12211405485868454, -0.07767055183649063, 0.07113222777843475, 0.09169624000787735, -0.11586396396160126, 0.08379074931144714, 0.017060106620192528, 0.06857460737228394, -0.04486938193440437, -0.043599460273981094, -0.12434718012809753, -0.0875329002737999, -0.023366518318653107, 0.10703667253255844, -0.1661275178194046, 0.2042008638381958, 0.021128429099917412, -0.0503728911280632, -0.13273820281028748, -0.08464265614748001, -0.008365452289581299, -0.04927072301506996, 0.13737404346466064, 0.04946201294660568, -0.2715064585208893, -0.21200788021087646, -0.08782826364040375, -0.03443469479680061, 0.10459163039922714, 0.020553214475512505, -0.11957655102014542, 0.06909281760454178, 0.02272890880703926, -0.07195675373077393, -0.15404708683490753, 0.01014083530753851, -0.08637519180774689, -0.024526428431272507, 0.02376076765358448, -0.12477988749742508, 0.03265205770730972, 0.0014347926480695605, -0.1333463490009308, 0.022545238956809044, -0.019649503752589226, -0.04928819090127945, -0.1302223950624466, 0.000034302473068237305, 0.1059049591422081, -0.03341620787978172, 0.05072127655148506, 0.04109684377908707, -0.018143601715564728, -0.019925041124224663, -0.21083632111549377, 0.07914645969867706, -0.07964203506708145, 0.09871186316013336, -0.05816267430782318, 0.06322940438985825, 0.002095939125865698, 0.006623986177146435, -0.001420909888111055, 0.02974874898791313, -0.03088507428765297, -0.12158118933439255, 0.17396748065948486, -0.07303037494421005, 0.12184259295463562, 0.2937513291835785, 0.08409921079874039, 0.027710875496268272, 0.07551947236061096, -0.0349607989192009, 0.22831383347511292, 0.14409393072128296, -0.06999814510345459, 0.23831135034561157, 0.1455809623003006, -0.060010526329278946, -0.33462607860565186, -0.010253552347421646, -0.15168067812919617, -0.030691975727677345, 0.0633324459195137, -0.2283298671245575, 0.12663736939430237, 0.15994961559772491, -0.03603522107005119, 0.24536095559597015, -0.12459857761859894, -0.038179222494363785, 
0.22954495251178741, 0.052960220724344254, 0.4109998643398285, -0.10178302973508835, -0.0867958664894104, -0.04852158948779106, -0.17404010891914368, 0.24910764396190643, -0.14783284068107605, -0.01757492870092392, 0.02441403828561306, 0.09671314805746078, -0.012401268817484379, -0.07776765525341034, 0.18382130563259125, 0.10878542065620422, 0.10883311927318573, -0.0873766764998436, -0.044073913246393204, 0.15918448567390442, -0.039983656257390976, 0.06876440346240997, 0.06451699882745743, 0.010125086642801762, -0.11778801679611206, 0.003224168671295047, 0.01330836396664381, 0.03358425945043564, 0.08104926347732544, -0.0662531778216362, -0.0887577012181282, -0.01654076762497425, -0.10834256559610367, -0.007661743089556694, 0.14913086593151093, 0.03267943486571312, 0.021918047219514847, 0.052451398223638535, 0.018763039261102676, -0.09168090671300888, -0.0488167442381382, -0.09261899441480637, -0.10310473293066025, 0.057446084916591644, -0.10828526318073273, 0.028161659836769104, 0.11496462672948837, 0.03702551871538162, 0.01788111962378025, 0.06015174463391304, -0.016385259106755257, 0.03592900559306145, 0.11458195745944977, -0.11185408383607864, -0.009705401957035065, 0.12255639582872391, -0.08164692670106888, 0.033924635499715805, 0.039331529289484024, 0.022781794890761375, 0.09128348529338837, -0.011684403754770756, -0.04645494744181633, 0.05782228335738182, -0.02631339058279991, 0.10050568729639053, 0.13430923223495483, 0.052022479474544525, -0.18737651407718658, 0.18044261634349823, 0.0019020992331206799, -0.03892584890127182, 0.04932833090424538, -0.026062317192554474, -0.10661143064498901, -0.10016955435276031, -0.02914367988705635, 0.19491784274578094, -0.13132570683956146, -0.12462259083986282, 0.06422948837280273, -0.05578159913420677, -0.004867395851761103, 0.1331036239862442, 0.03470349311828613, 0.10446660220623016, 0.05740540102124214, -0.039427269250154495, 0.016084451228380203, -0.0700242817401886, -0.10518813133239746, 0.014496348798274994, 
-0.0888863354921341, -0.09710531681776047, -0.05785957723855972, 0.21346601843833923, -0.07352914661169052, -0.07966910302639008, -0.12659870088100433, 0.09202800691127777, -0.07382035255432129, 0.07267477363348007, -0.025596749037504196, 0.0007465073722414672, 0.01927371136844158, -0.009702026844024658, -0.004904499743133783, -0.016639232635498047, -0.13217082619667053, 0.13009589910507202, 0.061537545174360275, -0.007260217797011137, -0.0746447816491127, -0.03897061198949814, 0.11591322720050812, 0.04297249764204025, 0.08988481760025024, 0.18994998931884766, 0.034141410142183304, 0.15410315990447998, -0.0710875391960144, -0.08971081674098969, 0.06829648464918137, 0.061470549553632736, 0.07110366970300674, 0.09276538342237473, -0.042648278176784515, 0.07215448468923569, -0.02340378239750862, 0.08445374667644501, -0.025520101189613342, -0.06792240589857101, -0.015916042029857635, -0.11279849708080292, -0.10496611148118973, -0.0015291012823581696, -0.0451849065721035, 0.20120690762996674, 0.03556602820754051, -0.024226689711213112, 0.06317336857318878, 0.04146880283951759, 0.01759062334895134, -0.04839930310845375, -0.014652172103524208, -0.1459355652332306, 0.06218241527676582, 0.03427331522107124, 0.04249066486954689, -0.030479038134217262, 0.45815280079841614, -0.02379598096013069, -0.142159566283226, -0.031138844788074493, 0.08187989890575409, -0.036715392023324966, 0.03212353214621544, 0.29339301586151123, 0.07713326066732407, -0.050660137087106705, -0.06278866529464722, 0.07809451967477798, -0.0130956145003438, 0.07827125489711761, 0.16925202310085297, 0.0361793152987957, 0.02232397347688675, -0.01877838373184204, 0.019646599888801575, -0.10628467053174973, 0.07743345946073532, -0.03928213194012642, -0.05746466666460037, 0.017085907980799675, 0.05189548432826996, -0.04771042242646217, 0.030525851994752884, -0.04466133937239647, 0.04604950547218323, -0.02133883535861969, -0.08193277567625046, -0.12379393726587296, -0.06850635260343552, -0.00829383172094822, 
-0.06939365714788437, 0.037862516939640045, -0.02252611704170704, -0.013933295384049416, 0.2486441433429718, 0.06135690584778786, 0.045747991651296616, 0.09355142712593079, -0.060237105935811996, 0.07697075605392456, 0.027072522789239883, -0.01994490809738636, -0.07656794041395187, -0.011962657794356346, -0.08948308229446411, 0.06371502578258514, -0.028563208878040314, -0.003959168680012226, -0.04524872079491615, 0.051901925355196, 0.03910234570503235, -0.14248402416706085, -0.06466618925333023, -0.07036557048559189, 0.08091341704130173, -0.10638885945081711, 0.030224701389670372, 0.04943918064236641, 0.06995915621519089, 0.021054396405816078, 0.06591859459877014, 0.03660307824611664, 0.018823228776454926, -0.043888069689273834, -0.02260236069560051, -0.022824645042419434, 0.08975901454687119, -0.011982318945229053, -0.04927362501621246, -0.010458170436322689, 0.1309276819229126, 0.23412981629371643, -0.05048465356230736, 0.009466050192713737, -0.011897358112037182, 0.04169232398271561, 0.05458624288439751, 0.18639236688613892, 0.0065437243320047855, 0.09645340591669083, -0.024904882535338402, -0.05671089142560959, 0.020537083968520164, -0.08881092071533203, -0.09581205993890762, -0.02762804925441742, 0.029722629114985466, -0.05179309844970703, -0.10925157368183136, 0.14550144970417023, -0.1810513585805893, 0.041950296610593796, 0.12418457865715027, -0.09495687484741211, -0.0966009870171547, -0.015067026950418949, 0.023355847224593163, 0.02296522445976734, -0.0020690157543867826, -0.08410567045211792, -0.003979292698204517, -0.02932833507657051, 0.010598385706543922, -0.3298227787017822, -0.17836135625839233, 0.03186631575226784, 0.025944683700799942, 0.0703243538737297, -0.028129106387495995, 0.15051758289337158, 0.005843267310410738, 0.04346611723303795, -0.09017764031887054, 0.025871725752949715, 0.00019604551198426634, 0.06995438039302826, -0.05970403552055359, 0.021495245397090912, -0.0500592365860939, -0.15292468667030334, 0.06809765100479126, 
-0.03447171300649643, -0.06370141357183456, 0.06211934983730316, 0.02463507652282715, 0.029638243839144707, -0.004524586722254753, -0.0993010476231575, 0.14199183881282806, -0.033292558044195175, -0.009788152761757374, -0.002599430503323674, 0.025671225041151047, 0.05186549574136734, -0.00591130880638957, -0.11359800398349762, -0.10155932605266571, 0.0302545428276062, -0.08408988267183304, -0.009391367435455322, 0.013338028453290462, -0.10103973001241684, 0.06447675079107285, -0.1219523549079895, 0.050734661519527435, -0.009650744497776031, 0.027842998504638672, 0.08969148248434067, 0.04126245900988579, -0.03307563439011574, 0.016743062064051628, 0.040854714810848236, 0.010329666547477245, 0.007652900647372007, -0.06146775186061859 ]
e6ff00320227fad621c5ad17936868f8c1a8906a
# Dataset Card for "skewexp_maxlength" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Asap7772/skewexp_maxlength
[ "region:us" ]
2024-01-25T07:37:42+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "output", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "alpaca_text", "dtype": "string"}, {"name": "prompt", "dtype": "string"}, {"name": "alpaca_prompt", "dtype": "string"}, {"name": "y_ref", "dtype": "string"}, {"name": "y_1", "dtype": "string"}, {"name": "y_2", "dtype": "string"}, {"name": "y_w", "dtype": "string"}, {"name": "y_w_alpaca", "dtype": "string"}, {"name": "y_l", "dtype": "string"}, {"name": "y_l_alpaca", "dtype": "string"}, {"name": "y_w_score", "dtype": "float64"}, {"name": "y_l_score", "dtype": "float64"}, {"name": "score_diff", "dtype": "float64"}], "splits": [{"name": "train", "num_bytes": 62156813, "num_examples": 19000}, {"name": "test", "num_bytes": 3233542, "num_examples": 1000}], "download_size": 31145494, "dataset_size": 65390355}}
2024-01-25T07:37:45+00:00
[]
[]
TAGS #region-us
# Dataset Card for "skewexp_maxlength" More Information needed
[ "# Dataset Card for \"skewexp_maxlength\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"skewexp_maxlength\"\n\nMore Information needed" ]
[ 6, 17 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"skewexp_maxlength\"\n\nMore Information needed" ]
[ -0.09861826151609421, 0.017993608489632607, -0.0029581591952592134, 0.0017442703247070312, 0.06534510105848312, 0.06703110784292221, 0.07134318351745605, 0.06505770236253738, 0.14517410099506378, 0.021878687664866447, 0.1491217464208603, -0.00500986073166132, 0.04705766215920448, 0.25406211614608765, -0.0276406891644001, -0.10515057295560837, 0.046872805804014206, 0.04529735818505287, -0.04641617089509964, 0.05948901176452637, 0.017530865967273712, -0.17422086000442505, 0.07513434439897537, -0.10769880563020706, -0.2512754201889038, 0.03585818409919739, -0.041304923593997955, -0.07625340670347214, 0.04766875132918358, -0.06320006400346756, 0.11114539206027985, -0.02950047329068184, 0.0065038916654884815, -0.09930390864610672, 0.015494315885007381, 0.027819061651825905, 0.015230259858071804, 0.06358050554990768, 0.06798132508993149, 0.011555115692317486, -0.06650499254465103, -0.058407850563526154, -0.021373162046074867, -0.021217262372374535, -0.0761672630906105, -0.11032740026712418, -0.09269723296165466, -0.0351468063890934, 0.06359434872865677, -0.00653532799333334, 0.05529266968369484, 0.12367868423461914, -0.13204537332057953, 0.022408733144402504, 0.15330582857131958, -0.13492847979068756, 0.010736008174717426, 0.11745762079954147, -0.027234135195612907, 0.11438463628292084, -0.09997405111789703, 0.03698138892650604, 0.11184309422969818, -0.022492174059152603, 0.03161197155714035, -0.044923070818185806, -0.08511918783187866, 0.11806097626686096, -0.035793885588645935, -0.044255081564188004, 0.2530820667743683, 0.020118746906518936, 0.02460693009197712, -0.04214749112725258, -0.025659076869487762, -0.11094780266284943, -0.013011835515499115, 0.09123010188341141, 0.0548965148627758, -0.03461028262972832, -0.025953933596611023, 0.012161783874034882, -0.09115414321422577, -0.11659097671508789, -0.1839039921760559, 0.10247582197189331, -0.02645278349518776, 0.17880691587924957, -0.14155684411525726, -0.06852319836616516, -0.050328727811574936, 
-0.023910963907837868, -0.030408082529902458, -0.10754207521677017, -0.057552460581064224, 0.008020784705877304, 0.011915151961147785, 0.003380382200703025, 0.0944739356637001, 0.03898850083351135, 0.12040748447179794, 0.028860509395599365, 0.033351052552461624, 0.09499672055244446, 0.18411922454833984, -0.06595161557197571, -0.027582023292779922, -0.06657005101442337, -0.05216098576784134, -0.10154471546411514, -0.017785750329494476, -0.08033686876296997, -0.07082515954971313, -0.020521437749266624, -0.10591983795166016, 0.09224047511816025, -0.020403169095516205, -0.1075555831193924, 0.0013030729023739696, -0.043968696147203445, 0.15153160691261292, -0.08814806491136551, 0.023881815373897552, -0.03421521186828613, -0.025541285052895546, 0.1221260204911232, -0.17920510470867157, -0.05154435709118843, 0.054408591240644455, 0.10420519858598709, -0.1064082607626915, -0.012663201428949833, -0.045403480529785156, -0.04851638525724411, 0.08641203492879868, -0.1789267659187317, 0.05920413136482239, -0.09434458613395691, -0.18513530492782593, 0.054817814379930496, -0.022529806941747665, -0.08653978258371353, 0.06567633152008057, 0.02030331641435623, 0.039423391222953796, 0.024393543601036072, -0.09099550545215607, 0.16408327221870422, -0.08622799813747406, 0.029627500101923943, -0.005016789771616459, 0.12939594686031342, -0.15290164947509766, -0.00987701490521431, -0.09477928280830383, 0.01458671223372221, -0.09269209206104279, 0.0161425918340683, -0.0987132266163826, 0.13281847536563873, -0.0830080434679985, -0.06306808441877365, -0.17588478326797485, 0.015987053513526917, -0.02814796194434166, 0.037244752049446106, -0.2255575805902481, -0.03972756862640381, 0.16824206709861755, -0.11373154073953629, -0.19585257768630981, 0.047754913568496704, -0.0036613186821341515, -0.03677738457918167, 0.005007870029658079, 0.29678329825401306, -0.007480301428586245, -0.12091254442930222, -0.026911536231637, 0.17299503087997437, -0.1451360434293747, -0.28759753704071045, 
0.09463191777467728, -0.04043271765112877, 0.03721923753619194, 0.023100746795535088, 0.12711726129055023, 0.0577433779835701, -0.05210777744650841, -0.0466739684343338, -0.02405998669564724, -0.14867942035198212, 0.029450712725520134, 0.01516298484057188, 0.044598277658224106, -0.06930776685476303, 0.177137553691864, 0.11451508104801178, 0.044473323971033096, 0.010321222245693207, -0.01018848828971386, 0.01770070753991604, 0.05250605568289757, -0.17825138568878174, -0.02106751687824726, -0.13481147587299347, -0.11423291265964508, -0.0039084237068891525, 0.05535099282860756, -0.001993464073166251, 0.13534890115261078, 0.07089995592832565, -0.048215869814157486, 0.005804534535855055, 0.08973213285207748, 0.125505730509758, 0.010047324933111668, -0.0697653666138649, 0.05625542625784874, -0.008017553947865963, -0.03198296204209328, -0.1760639250278473, -0.08148059248924255, -0.07193771004676819, 0.08681820333003998, 0.03191451355814934, 0.02283838950097561, -0.017091326415538788, 0.05920718237757683, 0.0023476381320506334, -0.0007336106500588357, -0.04524213448166847, 0.02424575574696064, -0.03303990140557289, 0.07224106043577194, 0.07113677263259888, -0.05254106596112251, 0.2393319308757782, 0.1319257616996765, -0.07262584567070007, 0.051542606204748154, -0.08444683998823166, 0.009479681961238384, -0.04344010353088379, -0.1276128590106964, 0.008570090867578983, -0.10592430830001831, -0.02089850977063179, 0.053941760212183, -0.06638593226671219, -0.004622771870344877, 0.06402681022882462, -0.010708492249250412, -0.05262671038508415, 0.02218385599553585, 0.10596049576997757, -0.21600215137004852, 0.14396290481090546, 0.21199816465377808, 0.11542728543281555, 0.18755418062210083, -0.08622846007347107, -0.13100621104240417, 0.050600167363882065, -0.06543377041816711, -0.10036495327949524, 0.18319423496723175, -0.05263957753777504, -0.03431297838687897, 0.1060451865196228, 0.025811411440372467, 0.0503261536359787, -0.07083587348461151, -0.07092178612947464, 
-0.008149041794240475, -0.007669118233025074, -0.1316758394241333, 0.01229703240096569, 0.01467635203152895, 0.03820841386914253, 0.045379266142845154, 0.027010753750801086, 0.09960293024778366, -0.008577127009630203, 0.0041933823376894, 0.09296423196792603, -0.12724903225898743, -0.23587724566459656, -0.0630151629447937, -0.11136000603437424, 0.027316560968756676, -0.0016419931780546904, -0.06194056570529938, -0.13535410165786743, -0.0633673369884491, 0.053403254598379135, -0.013492495752871037, -0.1584220975637436, 0.058532096445560455, 0.033876512199640274, 0.007018700707703829, -0.06533978879451752, -0.07962387800216675, 0.03890479356050491, -0.06375649571418762, 0.12448454648256302, 0.08713197708129883, -0.02031051740050316, 0.14971914887428284, 0.10185231268405914, -0.020581042394042015, 0.06566080451011658, -0.02769329771399498, 0.09045109152793884, -0.05794426426291466, -0.04871773347258568, 0.14866551756858826, 0.07542724907398224, 0.00021750066662207246, 0.0032903046812862158, 0.06377946585416794, -0.0948035717010498, -0.03010817989706993, 0.004202746320515871, -0.2147032618522644, -0.14700235426425934, -0.11548551172018051, -0.11944827437400818, 0.13642339408397675, 0.18391475081443787, 0.045365430414676666, -0.09379658848047256, 0.0913916528224945, 0.11443942040205002, 0.025519605726003647, -0.15559330582618713, -0.017036212608218193, -0.03191465511918068, 0.0414295457303524, 0.04032156988978386, -0.1480116844177246, -0.009134725667536259, 0.13909919559955597, 0.1637980192899704, 0.2390463948249817, 0.025270430371165276, 0.15667106211185455, -0.0023798225447535515, 0.10166960954666138, 0.10859517753124237, 0.17874647676944733, 0.02399294637143612, -0.0687369704246521, 0.035731639713048935, 0.011663268320262432, -0.028519224375486374, -0.0018964961636811495, 0.0851176455616951, -0.14330464601516724, 0.04100351780653, -0.08391475677490234, 0.05664035305380821, -0.04106292128562927, 0.10301914811134338, -0.14716483652591705, 0.027772901579737663, 
-0.009739663451910019, 0.06543844938278198, -0.09850303083658218, 0.09221193194389343, 0.18816831707954407, -0.059869226068258286, 0.07920646667480469, -0.00004414055001689121, 0.07952124625444412, -0.0009854709496721625, 0.0005330995190888643, -0.07593649625778198, -0.016744330525398254, -0.005930142477154732, 0.10897470265626907, -0.1134309247136116, 0.16072894632816315, 0.015080452896654606, -0.03452933952212334, -0.12953412532806396, -0.0831664428114891, -0.028428101912140846, -0.08091794699430466, 0.08946157246828079, 0.029810508713126183, -0.1569463163614273, -0.14266392588615417, -0.10085086524486542, -0.038455162197351456, 0.11574453860521317, 0.06675487756729126, -0.08754237741231918, 0.04986400157213211, -0.01898934133350849, -0.046415336430072784, -0.13983508944511414, 0.07390517741441727, -0.10270701348781586, -0.040330931544303894, 0.08136952668428421, -0.21677348017692566, 0.03798830136656761, 0.008566439151763916, -0.14777429401874542, 0.10714281350374222, 0.05270387604832649, -0.055138859897851944, -0.08665476739406586, 0.023596322163939476, 0.17064902186393738, -0.008695621974766254, 0.07606328278779984, 0.0463295504450798, 0.07984039187431335, 0.007724914234131575, -0.17424710094928741, 0.07909461110830307, -0.07284419983625412, 0.07336362451314926, -0.045706119388341904, 0.07755634188652039, -0.08209314197301865, 0.027524814009666443, -0.018334362655878067, 0.04083692654967308, -0.08535197377204895, -0.10279592871665955, 0.07879743725061417, -0.142154723405838, 0.1556994616985321, 0.20324526727199554, 0.11550931632518768, 0.08934267610311508, 0.1365269422531128, -0.026264352723956108, 0.2023249715566635, 0.0510532408952713, -0.06254369765520096, 0.21920670568943024, 0.07022204995155334, 0.047871142625808716, -0.30128344893455505, 0.004526674747467041, -0.12600819766521454, -0.022805633023381233, -0.002507719211280346, -0.1459813266992569, 0.18714459240436554, 0.17340479791164398, -0.013655266724526882, 0.28345316648483276, -0.16899985074996948, 
-0.03957338258624077, 0.11393330991268158, 0.02102123387157917, 0.36519381403923035, -0.0708034485578537, -0.06279963254928589, -0.0938035324215889, -0.1469692587852478, 0.19932453334331512, -0.1636916995048523, 0.004759578034281731, -0.023712696507573128, 0.05387488380074501, -0.01923513598740101, -0.08121799677610397, 0.15137086808681488, 0.08168825507164001, 0.07889730483293533, -0.07248412817716599, 0.02248094044625759, 0.18228404223918915, -0.035742949694395065, 0.09644028544425964, -0.08653078973293304, -0.017596177756786346, -0.13466176390647888, -0.02004493959248066, 0.020218146964907646, -0.05267703905701637, 0.10197252780199051, -0.03809817507863045, -0.09636558592319489, -0.03210606798529625, -0.09519277513027191, -0.0014061917318031192, 0.12187311053276062, 0.03877881541848183, -0.029657259583473206, 0.09612005203962326, 0.04150380939245224, -0.20613250136375427, -0.1692747175693512, -0.09469947218894958, -0.0828147828578949, 0.06898942589759827, -0.1942848414182663, 0.03563239797949791, 0.08672720193862915, 0.046439241617918015, -0.053102150559425354, 0.07695752382278442, 0.0019511133432388306, 0.033662114292383194, 0.1525946706533432, -0.07450035214424133, -0.014733274467289448, 0.0938846617937088, -0.03604419156908989, -0.05340610444545746, 0.026757968589663506, -0.03447787091135979, 0.05517324432730675, 0.006718585733324289, -0.0470610074698925, 0.04818563163280487, -0.04466629400849342, 0.16635988652706146, 0.09140234440565109, 0.0862431675195694, -0.20729538798332214, 0.15058299899101257, -0.011271934024989605, -0.015241269953548908, -0.007155786734074354, 0.05074184015393257, -0.09563025832176208, -0.09211600571870804, 0.06113428622484207, 0.05807218328118324, -0.08127154409885406, -0.09096050262451172, 0.04097539931535721, -0.10058954358100891, 0.03381055220961571, 0.0437515452504158, 0.04320550337433815, 0.07202289998531342, 0.02504074200987816, -0.05525419861078262, -0.04980108141899109, -0.04605092108249664, -0.08850675076246262, 
0.0753919780254364, -0.08357973396778107, -0.029832938686013222, -0.07146807014942169, 0.1800665706396103, -0.0733117014169693, -0.012369588948786259, -0.07229318469762802, 0.04858234152197838, -0.09272400289773941, 0.04394199326634407, -0.0031616920605301857, -0.008612562902271748, -0.02639569342136383, 0.06861573457717896, -0.04832185059785843, -0.04587011784315109, -0.11555911600589752, 0.05899828299880028, 0.04429802671074867, -0.028559846803545952, -0.04029547795653343, -0.008245669305324554, 0.06988639384508133, 0.030927952378988266, 0.06313582509756088, 0.10247092694044113, 0.04099774360656738, 0.09533857554197311, 0.027845727279782295, -0.12523318827152252, 0.10703830420970917, 0.08427313715219498, 0.09508227556943893, 0.1583898365497589, -0.0046406942419707775, 0.09447292238473892, 0.04614890366792679, 0.0836527869105339, -0.14055851101875305, -0.07395585626363754, -0.03229928016662598, -0.19220058619976044, -0.07405447959899902, 0.006999744102358818, -0.06256209313869476, 0.1462419629096985, 0.06039952486753464, 0.0034959178883582354, 0.04425620660185814, -0.0004660442937165499, -0.05313996970653534, -0.032863643020391464, -0.04098588600754738, -0.15829019248485565, 0.045815348625183105, 0.048922035843133926, 0.06592822074890137, -0.025953853502869606, 0.4379432201385498, 0.006022786255925894, -0.15083442628383636, -0.035564739257097244, 0.10682143270969391, -0.03026263229548931, 0.015338193625211716, 0.4262903034687042, 0.10658062249422073, -0.057765599340200424, -0.05447380617260933, 0.11163309216499329, 0.07126941531896591, 0.16331776976585388, 0.17185038328170776, 0.11544369161128998, -0.025063052773475647, 0.00882317777723074, -0.0011848144931718707, -0.1404583603143692, 0.028993258252739906, 0.04063623771071434, -0.08032351732254028, 0.05567878112196922, 0.053425081074237823, -0.01454177126288414, 0.05702732503414154, -0.07083066552877426, -0.014082378707826138, -0.04382014274597168, -0.04837331175804138, -0.08232781291007996, -0.034574493765830994, 
-0.030840681865811348, -0.10180250555276871, 0.029753463342785835, -0.045335803180933, 0.0077661434188485146, 0.20941489934921265, 0.042350005358457565, 0.037438102066516876, 0.10451918840408325, -0.04071510583162308, 0.0528908334672451, 0.05438658595085144, 0.01078407745808363, -0.06024225056171417, 0.05586300790309906, -0.037730127573013306, 0.05726459622383118, -0.09267150610685349, 0.004139612894505262, -0.06637275964021683, 0.04883572831749916, 0.027235332876443863, -0.07377433031797409, -0.07309761643409729, -0.0752030536532402, 0.0705905556678772, -0.03400184214115143, 0.059256527572870255, 0.06776121258735657, 0.035175446420907974, -0.016477951779961586, 0.10085930675268173, -0.015374059788882732, -0.023530689999461174, -0.021842971444129944, -0.041652146726846695, -0.0018166712252423167, 0.08786054700613022, -0.052571024745702744, -0.05958952754735947, -0.05265265330672264, 0.17169779539108276, 0.23015429079532623, -0.08434601128101349, -0.003605928737670183, -0.02520350180566311, 0.027507493272423744, 0.02281409688293934, 0.19676312804222107, 0.03601205721497536, 0.14668887853622437, -0.029857350513339043, -0.08045966178178787, 0.034291937947273254, -0.03867850825190544, -0.11447371542453766, 0.0028167918790131807, 0.03633490204811096, -0.05335754528641701, -0.10507868975400925, 0.06503747403621674, -0.08168015629053116, 0.17145682871341705, 0.12018401175737381, -0.12385658174753189, -0.078156977891922, 0.006127974949777126, 0.04191143810749054, 0.027524029836058617, 0.017706135287880898, -0.10467421263456345, 0.025489751249551773, -0.1010400801897049, -0.009553419426083565, -0.3410417139530182, -0.14660194516181946, 0.0320410393178463, 0.06150002032518387, 0.06157355010509491, -0.022077228873968124, 0.09967821836471558, 0.030048899352550507, 0.01313453447073698, -0.11192652583122253, 0.04441266879439354, -0.031830232590436935, 0.007717357948422432, -0.009055697359144688, 0.07159562408924103, -0.052716515958309174, -0.1893634796142578, 
0.07064604014158249, -0.030972573906183243, -0.1151818335056305, 0.06002752482891083, 0.06583760678768158, 0.020370254293084145, -0.050194114446640015, -0.06176392361521721, 0.13730084896087646, -0.011965451762080193, -0.033177684992551804, -0.021341970190405846, 0.0387832410633564, 0.09280125051736832, -0.005660891067236662, -0.10276772081851959, -0.08468350768089294, -0.02388741262257099, -0.1300317943096161, 0.02620556391775608, -0.015316798351705074, -0.18443119525909424, 0.03731634095311165, -0.09841874986886978, 0.04355080425739288, -0.024420641362667084, 0.029642269015312195, 0.11091320961713791, 0.03643064945936203, -0.0380525067448616, 0.0028001556638628244, 0.03780929371714592, -0.009723953902721405, -0.01926187053322792, -0.07195145636796951 ]
d1810ea6b61a0c9bc548b4faf618462213339f3e
# Dataset Card for "skewexp_minlength" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Asap7772/skewexp_minlength
[ "region:us" ]
2024-01-25T07:37:51+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "output", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "alpaca_text", "dtype": "string"}, {"name": "prompt", "dtype": "string"}, {"name": "alpaca_prompt", "dtype": "string"}, {"name": "y_ref", "dtype": "string"}, {"name": "y_1", "dtype": "string"}, {"name": "y_2", "dtype": "string"}, {"name": "y_w", "dtype": "string"}, {"name": "y_w_alpaca", "dtype": "string"}, {"name": "y_l", "dtype": "string"}, {"name": "y_l_alpaca", "dtype": "string"}, {"name": "y_w_score", "dtype": "float64"}, {"name": "y_l_score", "dtype": "float64"}, {"name": "score_diff", "dtype": "float64"}], "splits": [{"name": "train", "num_bytes": 62156813, "num_examples": 19000}, {"name": "test", "num_bytes": 3233542, "num_examples": 1000}], "download_size": 31144787, "dataset_size": 65390355}}
2024-01-25T07:37:55+00:00
[]
[]
TAGS #region-us
# Dataset Card for "skewexp_minlength" More Information needed
[ "# Dataset Card for \"skewexp_minlength\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"skewexp_minlength\"\n\nMore Information needed" ]
[ 6, 17 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"skewexp_minlength\"\n\nMore Information needed" ]
[ -0.09750694036483765, 0.04622070491313934, -0.0035981503315269947, -0.0010965698165819049, 0.05702574551105499, 0.04499742016196251, 0.10150543600320816, 0.06726913899183273, 0.06717172265052795, 0.019213490188121796, 0.16954851150512695, 0.0042906757444143295, 0.06086413189768791, 0.21675604581832886, -0.06279930472373962, -0.14869321882724762, 0.04966465011239052, 0.045187678188085556, -0.009531090967357159, 0.05420629307627678, 0.01073297206312418, -0.14913690090179443, 0.07401683926582336, -0.11204855889081955, -0.21703167259693146, 0.01480837445706129, -0.034497927874326706, -0.09074661135673523, 0.043212372809648514, -0.05483217537403107, 0.13075892627239227, -0.02461453340947628, 0.019178852438926697, -0.10846491158008575, 0.019565841183066368, 0.018379811197519302, 0.027727587148547173, 0.07455072551965714, 0.051366932690143585, 0.007465314120054245, -0.0784960463643074, -0.05861693248152733, 0.0036709364503622055, -0.03199295699596405, -0.08334584534168243, -0.11374474316835403, -0.08732325583696365, -0.03447718173265457, 0.043044134974479675, -0.0008207272621802986, 0.05027423053979874, 0.12395228445529938, -0.13703900575637817, 0.024675821885466576, 0.15344560146331787, -0.06951770186424255, -0.0019129007123410702, 0.11607316136360168, -0.051466163247823715, 0.12876640260219574, -0.09100808948278427, 0.06795013695955276, 0.08795206993818283, -0.016042275354266167, 0.014621738344430923, -0.04992485046386719, -0.10528384149074554, 0.12166937440633774, -0.029373774304986, -0.06695210188627243, 0.21765287220478058, 0.011421799659729004, 0.031556304544210434, -0.03896086663007736, -0.017318256199359894, -0.08955931663513184, -0.0532936155796051, 0.09638246148824692, 0.05188876762986183, -0.015964757651090622, -0.04908553138375282, -0.006982574705034494, -0.09364897757768631, -0.126821368932724, -0.1953815221786499, 0.1303950995206833, -0.025293421000242233, 0.17011916637420654, -0.17947492003440857, -0.06403820961713791, -0.0405627079308033, 
-0.029286881908774376, -0.026078037917613983, -0.09735865145921707, 0.00933869369328022, 0.01056765764951706, 0.0025078565813601017, 0.004672547336667776, 0.10986379534006119, 0.06492455303668976, 0.10673215985298157, 0.04183569923043251, 0.025898024439811707, 0.09090771526098251, 0.14890716969966888, -0.03349200263619423, 0.0018875665264204144, -0.05425238236784935, -0.06055670231580734, -0.09589102864265442, 0.004545678850263357, -0.09921971708536148, -0.07664687186479568, -0.023764481768012047, -0.0921846479177475, 0.09419003129005432, -0.012713546864688396, -0.10365021973848343, 0.006533232983201742, -0.03253558278083801, 0.12086667120456696, -0.08855363726615906, 0.03514767438173294, -0.04224185645580292, -0.03433588892221451, 0.10769177973270416, -0.1542523056268692, -0.041875310242176056, 0.042189065366983414, 0.12340782582759857, -0.10651235282421112, -0.006023823749274015, -0.04645206779241562, -0.0615120492875576, 0.06937994807958603, -0.1807738095521927, 0.07910791784524918, -0.11503034830093384, -0.16530846059322357, 0.0586455836892128, -0.030561521649360657, -0.08031640201807022, 0.10616742819547653, -0.01802876964211464, 0.02705502137541771, 0.022154852747917175, -0.07744879275560379, 0.15021397173404694, -0.07774258404970169, 0.034613125026226044, -0.03055622987449169, 0.13512220978736877, -0.17100267112255096, 0.009418952278792858, -0.09976273775100708, 0.0033941578585654497, -0.060655053704977036, 0.01875716634094715, -0.12182989716529846, 0.12034165114164352, -0.05417582765221596, -0.056310079991817474, -0.19253642857074738, 0.016926109790802002, -0.010497305542230606, 0.0468098446726799, -0.1890496462583542, -0.05352330952882767, 0.18186819553375244, -0.11503176391124725, -0.12944769859313965, 0.038751617074012756, 0.0024458186235278845, -0.035352177917957306, 0.004963132552802563, 0.37185099720954895, 0.05001122131943703, -0.11211153119802475, 0.033801812678575516, 0.19440896809101105, -0.14367808401584625, -0.27214550971984863, 
0.07013911753892899, -0.03991544991731644, -0.018635069951415062, 0.03348330780863762, 0.11850778013467789, 0.049733176827430725, -0.08085623383522034, -0.0471862368285656, -0.0029108489397913218, -0.14085239171981812, 0.03372932970523834, 0.037174537777900696, 0.04956715926527977, -0.07543075829744339, 0.19178174436092377, 0.09162324666976929, 0.06384893506765366, 0.0008695279830135405, -0.012992874719202518, 0.0063743977807462215, 0.07192441821098328, -0.11534994095563889, -0.022881142795085907, -0.14190416038036346, -0.11389631032943726, -0.0075028217397630215, 0.05258984491229057, 0.012220583856105804, 0.11207213997840881, 0.06908752024173737, -0.03983931988477707, 0.019283607602119446, 0.09254109114408493, 0.09587448835372925, 0.024865148589015007, -0.08273018896579742, 0.06452854722738266, 0.0268857404589653, -0.04152598977088928, -0.13302578032016754, -0.036417022347450256, -0.06853212416172028, 0.08477573096752167, 0.02156205102801323, 0.029390132054686546, -0.011648439802229404, 0.048165831714868546, 0.014303463511168957, 0.01867745630443096, -0.04139973223209381, 0.020594758912920952, -0.026624754071235657, 0.08696465939283371, 0.03688949719071388, -0.055951330810785294, 0.1972465217113495, 0.14688672125339508, -0.06729082018136978, 0.060554638504981995, -0.12908919155597687, 0.019123895093798637, -0.02464754320681095, -0.13403768837451935, 0.0052654929459095, -0.09576667845249176, -0.03380032256245613, 0.05319985747337341, -0.07098527997732162, 0.01462541427463293, 0.0465245321393013, -0.05013754218816757, -0.06468959897756577, 0.041703734546899796, 0.09011697769165039, -0.17902745306491852, 0.15501731634140015, 0.1950504183769226, 0.10488478094339371, 0.18683236837387085, -0.09378424286842346, -0.13453620672225952, 0.05342356860637665, -0.07501009851694107, -0.09258375316858292, 0.1810474395751953, -0.07330559939146042, -0.03576723858714104, 0.11236163973808289, 0.030081404373049736, 0.06642770022153854, -0.09886877983808517, -0.0791504830121994, 
-0.0073987869545817375, -0.010051602497696877, -0.15435484051704407, 0.0022162089589983225, 0.00967498030513525, 0.042342282831668854, 0.04086247459053993, 0.0016273525543510914, 0.0735352635383606, -0.0033280556090176105, 0.0027970108203589916, 0.08950666338205338, -0.16355453431606293, -0.2340989112854004, -0.0917094424366951, -0.10080355405807495, 0.042117152363061905, 0.0036378430668264627, -0.03645436838269234, -0.143541619181633, -0.06097474694252014, 0.03128290921449661, 0.004078051075339317, -0.13089467585086823, 0.05285477265715599, 0.05624549835920334, 0.010309719480574131, -0.10627236217260361, -0.09453216940164566, 0.022301891818642616, -0.08116541802883148, 0.1872178167104721, 0.11989451944828033, -0.06264255940914154, 0.13811436295509338, 0.11720078438520432, -0.032270848751068115, 0.07361973822116852, -0.03272354230284691, 0.11583851277828217, -0.03919530659914017, -0.04093925282359123, 0.13379868865013123, 0.06841219961643219, 0.02368074469268322, 0.016897857189178467, 0.060588981956243515, -0.10424450784921646, -0.03872380033135414, -0.006128125358372927, -0.20005539059638977, -0.13614422082901, -0.11439784616231918, -0.09233974665403366, 0.16942228376865387, 0.1424088478088379, 0.030998798087239265, -0.05771642550826073, 0.09351970255374908, 0.11888126283884048, 0.009396936744451523, -0.12214091420173645, -0.021816793829202652, 0.024406632408499718, 0.008590353652834892, 0.03610679507255554, -0.13590267300605774, 0.00016323781164828688, 0.15552383661270142, 0.13369034230709076, 0.24676278233528137, 0.05850285664200783, 0.13304807245731354, -0.011578989215195179, 0.11432681232690811, 0.11412285268306732, 0.16015243530273438, 0.05108323693275452, -0.06585387140512466, 0.02528422698378563, 0.027994344010949135, -0.046798571944236755, -0.01724427565932274, 0.06612671911716461, -0.15520036220550537, 0.04117066413164139, -0.01653519831597805, 0.04166026785969734, -0.05553799495100975, 0.10860299319028854, -0.1557130068540573, 0.013620608486235142, 
-0.015941962599754333, 0.058472421020269394, -0.09980548173189163, 0.10319088399410248, 0.20011913776397705, -0.049891725182533264, 0.031092984601855278, 0.0034798826090991497, 0.08916766196489334, -0.017066797241568565, 0.004541999660432339, -0.08498816192150116, -0.022833731025457382, -0.009344162419438362, 0.09815775603055954, -0.10735099762678146, 0.19830583035945892, 0.012379616498947144, -0.04362303391098976, -0.11853872984647751, -0.07167598605155945, -0.02959490567445755, -0.028511719778180122, 0.07852738350629807, 0.027091331779956818, -0.16469706594944, -0.1909489929676056, -0.1371430605649948, -0.041378505527973175, 0.16956163942813873, 0.0575522743165493, -0.09399479627609253, 0.01815055124461651, 0.0013083255616948009, -0.0387854166328907, -0.21792393922805786, 0.06815735250711441, -0.10109151899814606, -0.03360592573881149, 0.07930604368448257, -0.2072543054819107, 0.030451700091362, 0.01744156889617443, -0.12171506881713867, 0.04501164332032204, 0.07838659733533859, -0.06285730004310608, -0.10025537759065628, 0.03970068320631981, 0.14336265623569489, -0.02568880282342434, 0.08262215554714203, 0.058864254504442215, 0.049358438700437546, 0.016295062378048897, -0.1695575714111328, 0.07383877784013748, -0.07729615271091461, 0.08738966286182404, -0.04201197624206543, 0.06871815770864487, -0.08220525830984116, 0.028783874586224556, -0.013442497700452805, 0.030476905405521393, -0.09333613514900208, -0.10474706441164017, 0.07979623228311539, -0.11780207604169846, 0.1394253522157669, 0.16560794413089752, 0.09715890884399414, 0.048950549215078354, 0.11372160166501999, -0.019984785467386246, 0.19103968143463135, 0.040164608508348465, -0.06295280158519745, 0.2074267715215683, 0.12367258220911026, 0.05505729466676712, -0.32782045006752014, -0.0004991149180568755, -0.12931892275810242, -0.030737893655896187, 0.024851134046912193, -0.14679166674613953, 0.1648181974887848, 0.17149801552295685, -0.0034965884406119585, 0.22556275129318237, -0.2187042534351349, 
-0.03646497428417206, 0.15353597700595856, 0.05725936219096184, 0.33247798681259155, -0.07359901070594788, -0.08286283910274506, -0.06137204170227051, -0.17936356365680695, 0.16116373240947723, -0.16025950014591217, 0.0026112673804163933, 0.000514704326633364, 0.10063248872756958, -0.0037777042016386986, -0.06405185908079147, 0.1726144403219223, 0.06914383918046951, 0.10111305117607117, -0.08353143185377121, -0.04316123202443123, 0.163627028465271, -0.02802061103284359, 0.07698795199394226, -0.036951370537281036, 0.016135288402438164, -0.08379191160202026, -0.012565840035676956, 0.022893888875842094, -0.05797693505883217, 0.09731150418519974, -0.06174878776073456, -0.10068287700414658, -0.04165573790669441, -0.11906131356954575, -0.017931319773197174, 0.11560969799757004, 0.024104177951812744, -0.09285386651754379, 0.08140624314546585, 0.02628708817064762, -0.2017105221748352, -0.08206826448440552, -0.1011347621679306, -0.08473116904497147, 0.06326654553413391, -0.16522382199764252, 0.018877340480685234, 0.07085204124450684, 0.04668831080198288, -0.029048921540379524, 0.053231436759233475, 0.028602037578821182, 0.02457801252603531, 0.13049538433551788, -0.06637217849493027, -0.05282190814614296, 0.08101506531238556, -0.05828876420855522, -0.023124516010284424, 0.026399686932563782, -0.04297327995300293, 0.0706823319196701, 0.007623181212693453, -0.05419272929430008, 0.060272958129644394, -0.038430605083703995, 0.15216082334518433, 0.10971606522798538, 0.05516170710325241, -0.21199223399162292, 0.16450048983097076, -0.01216220948845148, -0.04101351276040077, -0.01496669091284275, 0.05262649431824684, -0.11929383128881454, -0.08986956626176834, 0.037977833300828934, 0.106821209192276, -0.09642383456230164, -0.0961015373468399, 0.03776762634515762, -0.08907752484083176, 0.02632768265902996, 0.05576729401946068, 0.03807797655463219, 0.06941533088684082, 0.01766318269073963, -0.019431564956903458, -0.03241267800331116, -0.027867285534739494, -0.06485164910554886, 
0.06899120658636093, -0.06486663967370987, -0.02889101766049862, -0.07540751993656158, 0.1660122275352478, -0.07083474099636078, -0.023224612697958946, -0.08287807554006577, 0.06018783897161484, -0.1033858135342598, 0.0607294999063015, -0.0118642533197999, -0.002121423138305545, 0.00033677887404337525, 0.04194214940071106, -0.040961019694805145, -0.03861237317323685, -0.10068844258785248, 0.06935318559408188, 0.02614315040409565, -0.01824025623500347, -0.04817040637135506, -0.01596621237695217, 0.08222909271717072, 0.042845919728279114, 0.06326133757829666, 0.12747542560100555, 0.052485909312963486, 0.09437424689531326, 0.022162701934576035, -0.11471828073263168, 0.1017775684595108, 0.08097419887781143, 0.09778197854757309, 0.1390036940574646, -0.03262551128864288, 0.07056242227554321, 0.022496415302157402, 0.08519762754440308, -0.06474193185567856, -0.07117380946874619, -0.0067734066396951675, -0.18912631273269653, -0.10743243247270584, -0.00925075076520443, -0.04679887741804123, 0.17040082812309265, 0.03556029871106148, -0.006100193131715059, 0.04816506430506706, 0.0081265764310956, -0.023633992299437523, -0.028689566999673843, -0.022177573293447495, -0.16914866864681244, 0.026606960222125053, 0.02316926419734955, 0.060384251177310944, -0.03214465081691742, 0.3655238747596741, 0.046652525663375854, -0.16043001413345337, -0.029160555452108383, 0.10957061499357224, -0.035220734775066376, 0.02021852508187294, 0.4085369408130646, 0.1277974247932434, -0.06752428412437439, -0.07906137406826019, 0.10015901923179626, 0.06944626569747925, 0.14684432744979858, 0.17278699576854706, 0.13765059411525726, -0.018356386572122574, 0.024432135745882988, -0.018625149503350258, -0.08665594458580017, -0.05260246619582176, 0.05379832535982132, -0.0744333416223526, 0.03464324027299881, 0.03762279450893402, -0.022934235632419586, 0.06914477050304413, -0.08355995267629623, 0.004011336248368025, -0.018780944868922234, -0.06107249855995178, -0.07998581975698471, -0.09558029472827911, 
-0.037616658955812454, -0.11475902050733566, 0.010309560224413872, -0.05680669844150543, 0.029808593913912773, 0.20394064486026764, 0.03402889147400856, 0.04976492002606392, 0.1323404163122177, -0.1041783019900322, 0.0443590022623539, 0.047199998050928116, 0.00009718978981254622, -0.062011975795030594, -0.0016722617438063025, -0.06694930046796799, 0.05056064948439598, -0.07496524602174759, 0.008106031455099583, -0.05873219296336174, 0.050616536289453506, 0.002584348665550351, -0.10981713235378265, -0.07939868420362473, -0.07316858321428299, 0.05634539574384689, -0.057250943034887314, 0.0329916775226593, 0.06039293110370636, 0.044585131108760834, 0.004849019926041365, 0.1432006061077118, -0.022703837603330612, -0.0576929971575737, -0.03687873110175133, -0.042392484843730927, -0.014680524356663227, 0.11523333191871643, -0.04030057415366173, -0.07887423783540726, -0.03877551853656769, 0.2108362764120102, 0.20569759607315063, -0.06891356408596039, -0.0028979110065847635, 0.006886656861752272, 0.0301764328032732, 0.016457656398415565, 0.17962419986724854, 0.028510596603155136, 0.09900829195976257, -0.02188129350543022, -0.12996135652065277, 0.029532548040151596, -0.07131460309028625, -0.12492556869983673, 0.009055566042661667, 0.04787478968501091, -0.0751538798213005, -0.13157615065574646, 0.0744105875492096, -0.07953456044197083, 0.1792636513710022, 0.13089148700237274, -0.11051744222640991, -0.06731925904750824, 0.019560636952519417, 0.05777221545577049, 0.0034861827734857798, 0.036604709923267365, -0.08885229378938675, 0.008875533007085323, -0.05481589585542679, -0.005114803556352854, -0.3263494074344635, -0.1341450959444046, 0.04738869518041611, 0.02321925386786461, 0.06388678401708603, -0.02898498624563217, 0.07305338978767395, 0.0210411474108696, 0.035664208233356476, -0.09396553039550781, 0.06766299158334732, 0.005567481275647879, 0.050497982650995255, -0.027044186368584633, 0.06346777081489563, -0.06456348299980164, -0.17082853615283966, 0.07986073195934296, 
-0.05635581165552139, -0.10442085564136505, 0.048299640417099, 0.0539536289870739, 0.04094856604933739, -0.05958240106701851, -0.06057241931557655, 0.14148066937923431, -0.033995646983385086, -0.01089850440621376, -0.023690590634942055, 0.053498513996601105, 0.06540285795927048, -0.013056286610662937, -0.08582445979118347, -0.07615308463573456, -0.0007052074070088565, -0.15291225910186768, 0.020502056926488876, -0.001969369361177087, -0.1537201851606369, 0.043465666472911835, -0.09918338060379028, 0.05744893103837967, -0.013251233845949173, 0.0360303670167923, 0.14606574177742004, 0.009880668483674526, -0.045416828244924545, 0.014348744414746761, 0.049960847944021225, 0.023901017382740974, -0.005144030787050724, -0.06828146427869797 ]
3e303c79ca129407a5f0205abcdab3915b612c53
# Dataset Card for "skewlognormal" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Asap7772/skewlognormal
[ "region:us" ]
2024-01-25T07:38:01+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "output", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "alpaca_text", "dtype": "string"}, {"name": "prompt", "dtype": "string"}, {"name": "alpaca_prompt", "dtype": "string"}, {"name": "y_ref", "dtype": "string"}, {"name": "y_1", "dtype": "string"}, {"name": "y_2", "dtype": "string"}, {"name": "y_w", "dtype": "string"}, {"name": "y_w_alpaca", "dtype": "string"}, {"name": "y_l", "dtype": "string"}, {"name": "y_l_alpaca", "dtype": "string"}, {"name": "y_w_score", "dtype": "float64"}, {"name": "y_l_score", "dtype": "float64"}, {"name": "score_diff", "dtype": "float64"}], "splits": [{"name": "train", "num_bytes": 77844991, "num_examples": 19000}, {"name": "test", "num_bytes": 4082779, "num_examples": 1000}], "download_size": 40268839, "dataset_size": 81927770}}
2024-01-25T07:38:05+00:00
[]
[]
TAGS #region-us
# Dataset Card for "skewlognormal" More Information needed
[ "# Dataset Card for \"skewlognormal\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"skewlognormal\"\n\nMore Information needed" ]
[ 6, 14 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"skewlognormal\"\n\nMore Information needed" ]
[ -0.09462761878967285, 0.21526813507080078, -0.004660598933696747, -0.01967865228652954, 0.06629320234060287, 0.06944336742162704, 0.12495853751897812, 0.04381309077143669, 0.17068196833133698, -0.006894350051879883, 0.12712223827838898, 0.0744582936167717, 0.03247044235467911, 0.18240170180797577, -0.03509753942489624, -0.08578545600175858, 0.06691638380289078, 0.02260640263557434, -0.08806974440813065, 0.01798914186656475, -0.024447329342365265, -0.06453650444746017, 0.05553353950381279, -0.10799253731966019, -0.1965354084968567, 0.05925397202372551, -0.0376814603805542, -0.05912347510457039, 0.06436250358819962, -0.05557002127170563, 0.12957918643951416, -0.019274594262242317, -0.015059955418109894, -0.12867285311222076, 0.021360529586672783, -0.015302163548767567, -0.03178197517991066, 0.036033932119607925, -0.0031639449298381805, -0.09150615334510803, -0.04968999698758125, -0.06799482554197311, 0.045959651470184326, -0.03758714348077774, -0.11419796943664551, -0.1667487770318985, -0.08170590549707413, -0.009533727541565895, 0.03406088054180145, -0.013006698340177536, 0.05773095786571503, 0.14023184776306152, -0.12335369735956192, 0.05733663961291313, 0.11537129431962967, -0.05310943350195885, 0.021119995042681694, 0.23235267400741577, -0.1069050207734108, 0.07085296511650085, -0.04578932747244835, 0.13448305428028107, 0.07366976141929626, -0.02154476009309292, -0.06211071088910103, -0.05587194859981537, -0.09704343229532242, 0.08190932869911194, -0.02474292553961277, -0.05516749247908592, 0.23205704987049103, -0.00811010506004095, 0.014170902781188488, 0.0047977683134377, 0.0004150932654738426, 0.0011746907839551568, 0.020433800294995308, 0.1078355684876442, -0.010628413408994675, -0.0005969566409476101, 0.07481466233730316, -0.09617424011230469, -0.09297573566436768, -0.09728184342384338, -0.15285564959049225, 0.17902444303035736, -0.03799987956881523, 0.16975927352905273, -0.1688249111175537, -0.0032877165358513594, -0.059462469071149826, 
-0.03894723579287529, -0.00241097342222929, -0.069956474006176, -0.06867819279432297, 0.013655143789947033, 0.0014796393224969506, 0.04038507118821144, 0.13004924356937408, 0.13518713414669037, 0.13144831359386444, 0.05878525972366333, -0.03392389789223671, 0.09372375160455704, 0.1694759875535965, -0.017202181741595268, 0.03750582039356232, -0.024703845381736755, 0.027357211336493492, -0.17312534153461456, 0.021169384941458702, -0.1347479373216629, -0.1057654619216919, -0.06449828296899796, -0.06817799061536789, 0.13650290668010712, 0.04386516287922859, -0.11590554565191269, -0.007318871561437845, -0.04859895631670952, 0.11385983973741531, -0.08632709830999374, 0.026277875527739525, -0.036919642239809036, -0.006795499473810196, 0.04517005756497383, -0.06393766403198242, 0.0035881425719708204, 0.04998127743601799, 0.10570356249809265, -0.06267202645540237, 0.02029917761683464, 0.0013290196657180786, 0.0012346258154138923, 0.07072804123163223, -0.248674675822258, 0.07843191176652908, -0.0971347764134407, -0.25611722469329834, 0.021934732794761658, -0.0055860127322375774, -0.03176691755652428, 0.09047766774892807, -0.0025096710305660963, 0.06620359420776367, -0.015763193368911743, -0.06742668151855469, 0.05704447627067566, -0.0648966059088707, 0.003632730571553111, -0.06338074058294296, 0.07329235225915909, -0.24461543560028076, -0.012261477299034595, -0.11379768699407578, 0.012049858458340168, -0.08402668684720993, 0.0762326642870903, -0.16422343254089355, 0.14452733099460602, -0.02618006058037281, -0.008739408105611801, -0.14422111213207245, 0.00991866085678339, -0.007398588117212057, 0.10082832723855972, -0.17820511758327484, -0.06439216434955597, 0.14993733167648315, -0.1849631518125534, -0.16085222363471985, 0.02486065961420536, -0.03581077978014946, -0.009459477849304676, 0.03925683721899986, 0.3795121908187866, 0.07918176800012589, -0.12851263582706451, 0.02704741060733795, 0.08302726596593857, -0.17171573638916016, -0.2757458984851837, 0.02444976381957531, 
-0.056873220950365067, -0.09329577535390854, 0.03523394092917442, 0.07945580035448074, 0.056871671229600906, -0.05298054218292236, -0.07682130485773087, -0.015751993283629417, -0.11796591430902481, 0.03871869668364525, 0.010553072206676006, 0.05092262104153633, -0.037196703255176544, 0.200140118598938, 0.08078143745660782, 0.049089863896369934, 0.013293216936290264, -0.008631337434053421, 0.03902652487158775, 0.12096820026636124, -0.08532130718231201, -0.006713066250085831, -0.1293981820344925, -0.2414885312318802, -0.012571222148835659, -0.061908140778541565, 0.07454538345336914, -0.0008511487394571304, 0.0832180455327034, -0.010768100619316101, 0.021611303091049194, 0.05901775881648064, 0.11752322316169739, 0.05296303704380989, 0.002780698239803314, 0.007286523934453726, 0.060924094170331955, -0.05665276572108269, -0.03829902037978172, -0.06757751107215881, -0.02831558883190155, 0.07456720620393753, 0.08571851253509521, 0.017855023965239525, -0.029784299433231354, 0.08327245712280273, 0.01723017729818821, 0.03381599113345146, -0.0434158630669117, 0.015415630303323269, -0.02954564057290554, -0.0876656100153923, 0.07955627888441086, -0.04521618410944939, 0.17601390182971954, 0.1366422325372696, -0.046548765152692795, 0.05551663041114807, -0.13798202574253082, 0.04634656012058258, -0.04775797203183174, -0.05767229199409485, 0.03112790547311306, -0.053125184029340744, -0.027537204325199127, 0.000959771394263953, -0.010564870201051235, 0.02809266187250614, 0.014472134411334991, -0.05075826123356819, -0.09001445770263672, 0.061457980424165726, 0.08309834450483322, -0.23640894889831543, 0.12569110095500946, 0.17118756473064423, 0.08508612960577011, 0.20325493812561035, -0.07680463045835495, -0.046839553862810135, 0.009349513798952103, -0.08817130327224731, -0.045411616563797, 0.17496885359287262, -0.05687003210186958, -0.005774547811597586, 0.08208640664815903, 0.022872760891914368, 0.08406174182891846, -0.076004259288311, -0.09763837605714798, -0.024056589230895042, 
0.003153071738779545, -0.07439198344945908, 0.07002752274274826, 0.01136061828583479, 0.03543533757328987, 0.005551347509026527, 0.052437808364629745, 0.1101788878440857, -0.020404132083058357, 0.005100761074572802, 0.10788086801767349, -0.17564018070697784, -0.21581296622753143, -0.12900733947753906, -0.07940569519996643, -0.005748277064412832, -0.009053233079612255, -0.03553631529211998, -0.12158191949129105, -0.049642741680145264, 0.0482640266418457, -0.0099272346124053, -0.15127474069595337, 0.00816079881042242, 0.00880642794072628, -0.0042195916175842285, -0.11653254181146622, -0.08144370466470718, -0.00782218761742115, -0.05934339389204979, 0.1375950276851654, 0.1457207351922989, -0.09556455165147781, 0.14804625511169434, 0.10679500550031662, -0.0074700042605400085, 0.08500689268112183, -0.023694029077887535, 0.10306805372238159, -0.03859873116016388, -0.04331277683377266, 0.07166261225938797, -0.011553239077329636, 0.017237989231944084, 0.01416888926178217, 0.06217966601252556, -0.12825168669223785, -0.01690744422376156, 0.017166506499052048, -0.22424696385860443, -0.20742885768413544, -0.16448329389095306, -0.05973049998283386, 0.09098788350820541, 0.08368738740682602, 0.06279181689023972, 0.03266613185405731, 0.09118794649839401, 0.15335717797279358, -0.058179959654808044, -0.11411863565444946, -0.0015226938994601369, 0.08476858586072922, 0.025545068085193634, -0.015646280720829964, -0.1556445211172104, 0.021706005558371544, 0.13977491855621338, 0.1413094401359558, 0.19950442016124725, 0.057350412011146545, 0.047708604484796524, 0.009711283259093761, 0.10594797134399414, 0.12429243326187134, 0.11911088228225708, 0.12069710344076157, -0.02479693852365017, 0.04655289277434349, 0.0034735938534140587, -0.09385188668966293, 0.0014131172792986035, 0.11125034838914871, -0.19844067096710205, 0.07961740344762802, -0.08904432505369186, 0.008869309909641743, -0.09782823175191879, 0.07106540352106094, -0.08475037664175034, 0.03527006506919861, 0.004625751171261072, 
0.15207433700561523, -0.06484649330377579, 0.0582905150949955, 0.10521715134382248, -0.08192408829927444, 0.10976281017065048, 0.0192467849701643, 0.07808493077754974, -0.07338355481624603, -0.03991513326764107, -0.13008446991443634, -0.07432492822408676, -0.018358374014496803, 0.08569037914276123, -0.0957956612110138, 0.19766800105571747, 0.01324359979480505, -0.05798529461026192, -0.09490404278039932, -0.09629599004983902, -0.009943333454430103, 0.008288059383630753, 0.10880124568939209, 0.07338304072618484, -0.22756993770599365, -0.20949982106685638, -0.1532929390668869, -0.04580114409327507, 0.09058209508657455, -0.023958547040820122, -0.12053831666707993, 0.08294174820184708, 0.018544333055615425, -0.07547203451395035, -0.20930172502994537, 0.031024547293782234, -0.09628088027238846, -0.03721504658460617, 0.07856576889753342, -0.17839090526103973, 0.021175280213356018, -0.018014468252658844, -0.07243052870035172, 0.08152403682470322, 0.078705333173275, -0.057026345282793045, -0.12201446294784546, -0.008642827160656452, 0.165704146027565, -0.048279304057359695, 0.07020150870084763, 0.009837881661951542, -0.03483238443732262, -0.05098576843738556, -0.2192908376455307, 0.06980487704277039, -0.05571000650525093, 0.11577358096837997, -0.04230648651719093, 0.14710718393325806, 0.028433293104171753, 0.006213742773979902, 0.0016555130714550614, 0.05082042142748833, -0.031068600714206696, -0.1321248710155487, 0.14994776248931885, -0.011367741040885448, 0.07801339775323868, 0.310226708650589, 0.09107735008001328, 0.02498052269220352, 0.07363402098417282, -0.033150527626276016, 0.19762027263641357, 0.14879868924617767, -0.062195539474487305, 0.19144542515277863, 0.13980786502361298, -0.06363087892532349, -0.3023528754711151, 0.016655946150422096, -0.14667735993862152, -0.013065273873507977, 0.03653980419039726, -0.20173679292201996, 0.21701346337795258, 0.128352090716362, -0.037188585847616196, 0.24328048527240753, -0.12080565840005875, -0.05037641152739525, 
0.2035202980041504, 0.04418571665883064, 0.4170339107513428, -0.09214160591363907, -0.10041656345129013, -0.05272775888442993, -0.2013954520225525, 0.2421809881925583, -0.09900268167257309, 0.008309013210237026, 0.001272488385438919, 0.14016403257846832, 0.015591015107929707, -0.06539366394281387, 0.18824167549610138, 0.0911015197634697, 0.0992853045463562, -0.09221228957176208, -0.025647392496466637, 0.12362527847290039, -0.07716551423072815, 0.06611853837966919, 0.06952240318059921, 0.039849285036325455, -0.09593064337968826, -0.013545366935431957, 0.0015646846732124686, 0.07041660696268082, 0.07864358276128769, -0.07336198538541794, -0.08111254870891571, -0.017051728442311287, -0.12482001632452011, -0.007633333560079336, 0.17088240385055542, 0.005327068269252777, 0.016756003722548485, 0.005652171093970537, -0.006470241118222475, -0.07920587807893753, -0.044846076518297195, -0.08677670359611511, -0.07992016524076462, 0.04384942352771759, -0.1037680134177208, 0.022600427269935608, 0.12944315373897552, 0.0521986186504364, -0.006644407287240028, 0.020917529240250587, -0.02479114569723606, 0.018353387713432312, 0.12462538480758667, -0.14181236922740936, -0.017666086554527283, 0.08327901363372803, -0.1451108306646347, 0.07584846764802933, 0.06555242091417313, 0.055322859436273575, 0.08838540315628052, 0.00395815446972847, -0.03114961087703705, 0.013835088349878788, -0.04050912335515022, 0.0769091472029686, 0.1029195562005043, 0.0356367863714695, -0.1641455739736557, 0.1850403994321823, 0.0005471904878504574, 0.0028291966300457716, 0.036233093589544296, 0.06759762018918991, -0.09816005080938339, -0.0973389521241188, -0.060470398515462875, 0.2290404587984085, -0.16446159780025482, -0.09847927838563919, 0.06683063507080078, -0.03671552240848541, -0.027200544252991676, 0.12483160942792892, 0.047043588012456894, 0.07168987393379211, 0.0035325437784194946, -0.050328049808740616, 0.009457745589315891, -0.07631631940603256, -0.06303886324167252, 0.02332642674446106, 
-0.09453382343053818, -0.12254820019006729, -0.044940825551748276, 0.18112438917160034, -0.07575945556163788, -0.07855252176523209, -0.1074945405125618, 0.07575004547834396, -0.09878645092248917, 0.06439034640789032, -0.026805387809872627, 0.029574355110526085, -0.0046377177350223064, -0.02695462293922901, 0.0015080164885148406, -0.019312987104058266, -0.11071058362722397, 0.0974750816822052, 0.03321244195103645, -0.024356013163924217, -0.07613837718963623, -0.06787789613008499, 0.09856661409139633, 0.04088619351387024, 0.10244733840227127, 0.18306541442871094, 0.04305043816566467, 0.16266225278377533, -0.07878810167312622, -0.06960908323526382, 0.1331409066915512, 0.057636331766843796, 0.06899995356798172, 0.07804292440414429, -0.061866775155067444, 0.07253103703260422, 0.03129206597805023, 0.09453198313713074, -0.062012702226638794, -0.08348944038152695, -0.05679340288043022, -0.11753646284341812, -0.13870680332183838, 0.010180667974054813, -0.06495954096317291, 0.19437865912914276, 0.06088440492749214, -0.044070661067962646, 0.04279662296175957, 0.06541935354471207, 0.019954359158873558, -0.06237591430544853, -0.016947224736213684, -0.14737702906131744, 0.040786195546388626, 0.049711305648088455, 0.0449921153485775, -0.03472891077399254, 0.40691307187080383, -0.05362836644053459, -0.15500330924987793, -0.03741037845611572, 0.10273315757513046, -0.02293022722005844, 0.01387040689587593, 0.31611669063568115, 0.08016986399888992, -0.04818674921989441, -0.0019441278418526053, 0.09324318170547485, -0.003894645720720291, 0.16140170395374298, 0.14382825791835785, 0.012864183634519577, 0.029259352013468742, -0.01472800225019455, 0.037743184715509415, -0.029689932242035866, 0.010424427688121796, 0.00631193071603775, -0.05774388834834099, 0.03623209148645401, 0.04880630597472191, -0.06217311695218086, 0.03643718361854553, -0.046363819390535355, 0.027952445670962334, 0.04025427997112274, -0.08603612333536148, -0.1366216540336609, -0.16683322191238403, 
-0.022701725363731384, -0.022041337564587593, 0.04683448746800423, -0.0324624739587307, -0.021456172689795494, 0.1684103161096573, 0.08379928022623062, 0.024128371849656105, 0.12337591499090195, -0.009476769715547562, 0.07870659977197647, 0.02629796229302883, -0.01864362694323063, -0.08718498796224594, 0.03785638138651848, -0.058212727308273315, 0.03396250680088997, -0.006532370578497648, -0.03767033666372299, -0.04783685877919197, 0.036975372582674026, 0.04269899055361748, -0.14742513000965118, -0.05631718039512634, -0.06393135339021683, 0.08659977465867996, -0.06666182726621628, 0.03447715565562248, 0.06454136967658997, 0.015341147780418396, 0.01207086443901062, 0.1361016482114792, 0.026708215475082397, -0.0103822136297822, -0.009834582917392254, -0.02691604755818844, -0.06906596571207047, 0.05275002494454384, -0.05441125109791756, -0.04511650279164314, -0.0005029067397117615, 0.11642532795667648, 0.23753614723682404, -0.022391626611351967, 0.013331186026334763, -0.030314890667796135, 0.040396105498075485, 0.038361791521310806, 0.14534153044223785, 0.0015813211211934686, 0.0993044376373291, 0.012784483842551708, -0.07654432207345963, -0.002431754255667329, -0.09004679322242737, -0.1246102824807167, -0.040183234959840775, 0.028653403744101524, -0.05571789667010307, -0.11348792165517807, 0.14799146354198456, -0.21386384963989258, 0.07763069123029709, 0.09639687091112137, -0.15383093059062958, -0.11484340578317642, -0.013426135294139385, 0.011358941905200481, 0.0022679278627038, -0.007884346880018711, -0.09257519990205765, -0.017781876027584076, -0.09108384698629379, -0.011899560689926147, -0.34453773498535156, -0.16127397119998932, -0.006950737908482552, 0.0680994763970375, 0.06531748175621033, -0.012746636755764484, 0.08473312854766846, 0.017495349049568176, 0.04581676051020622, -0.09587997198104858, 0.03517436236143112, -0.02597632445394993, 0.045876502990722656, -0.08422154188156128, 0.03608766570687294, -0.055817533284425735, -0.12930046021938324, 
0.09290379285812378, 0.006831595674157143, -0.08057522773742676, 0.055727872997522354, 0.04085788503289223, 0.004857952706515789, -0.003672034712508321, -0.11177515983581543, 0.13568143546581268, -0.0249182041734457, -0.0004978884826414287, -0.01893850788474083, 0.018119247630238533, 0.03849770501255989, -0.015622398816049099, -0.1523171365261078, -0.10250391811132431, 0.04879200831055641, -0.08012641966342926, 0.03384386748075485, 0.010250451043248177, -0.10454563051462173, 0.030362842604517937, -0.1357826292514801, 0.06981176882982254, 0.007233846932649612, 0.057431142777204514, 0.07955154031515121, 0.01833319664001465, -0.013906215317547321, -0.07823213189840317, 0.06573616713285446, -0.0005032320623286068, -0.02816144935786724, -0.08905414491891861 ]
1039334864860026c4e424c6443275d76888aed8
# Dataset Card for "skewlognormal_maxlength" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Asap7772/skewlognormal_maxlength
[ "region:us" ]
2024-01-25T07:38:11+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "output", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "alpaca_text", "dtype": "string"}, {"name": "prompt", "dtype": "string"}, {"name": "alpaca_prompt", "dtype": "string"}, {"name": "y_ref", "dtype": "string"}, {"name": "y_1", "dtype": "string"}, {"name": "y_2", "dtype": "string"}, {"name": "y_w", "dtype": "string"}, {"name": "y_w_alpaca", "dtype": "string"}, {"name": "y_l", "dtype": "string"}, {"name": "y_l_alpaca", "dtype": "string"}, {"name": "y_w_score", "dtype": "float64"}, {"name": "y_l_score", "dtype": "float64"}, {"name": "score_diff", "dtype": "float64"}], "splits": [{"name": "train", "num_bytes": 77844991, "num_examples": 19000}, {"name": "test", "num_bytes": 4082779, "num_examples": 1000}], "download_size": 40225253, "dataset_size": 81927770}}
2024-01-25T07:38:15+00:00
[]
[]
TAGS #region-us
# Dataset Card for "skewlognormal_maxlength" More Information needed
[ "# Dataset Card for \"skewlognormal_maxlength\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"skewlognormal_maxlength\"\n\nMore Information needed" ]
[ 6, 18 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"skewlognormal_maxlength\"\n\nMore Information needed" ]
[ -0.08569721132516861, 0.10013224184513092, -0.00316444574855268, -0.0007423758506774902, 0.04223310202360153, 0.0590401217341423, 0.08042056113481522, 0.06422039121389389, 0.1212497130036354, 0.016718650236725807, 0.14191760122776031, -0.04228198528289795, 0.015213632956147194, 0.19907882809638977, -0.02050829865038395, -0.12465695291757584, 0.03294176235795021, 0.06471347063779831, -0.0663570687174797, 0.0428629107773304, -0.0020453589968383312, -0.13600370287895203, 0.07152921706438065, -0.1063733696937561, -0.23222610354423523, 0.05262717977166176, -0.046701669692993164, -0.061718348413705826, 0.04058796539902687, -0.07792609930038452, 0.0708494558930397, -0.04478498920798302, -0.012246672995388508, -0.09252358973026276, 0.015387865714728832, 0.0039098928682506084, 0.015110699459910393, 0.0531403087079525, 0.03901764377951622, -0.0014976050006225705, -0.0003134022990707308, -0.09205178171396255, -0.00854401383548975, -0.02292308211326599, -0.08033562451601028, -0.10123474150896072, -0.06300757080316544, -0.021721620112657547, 0.0549209825694561, 0.022078795358538628, 0.03781846538186073, 0.12335444241762161, -0.14259110391139984, 0.030343320220708847, 0.13129974901676178, -0.12501610815525055, -0.0005395465414039791, 0.21261832118034363, -0.0334102101624012, 0.10339900106191635, -0.08686047047376633, 0.0882604569196701, 0.1212877556681633, -0.01451660692691803, 0.021333621814846992, -0.05635838955640793, -0.021988140419125557, 0.10122859477996826, -0.02844167686998844, -0.019943268969655037, 0.21773874759674072, 0.02158936858177185, 0.05076095834374428, -0.05262145400047302, -0.017505470663309097, -0.10974065959453583, -0.007060017436742783, 0.12564119696617126, 0.03815564513206482, -0.021878676488995552, -0.009297279641032219, -0.04787067323923111, -0.09212078154087067, -0.0985056608915329, -0.1662655621767044, 0.09388784319162369, -0.03864036127924919, 0.1646319329738617, -0.13305816054344177, -0.04249541088938713, -0.0821196511387825, -0.04343877360224724, 
-0.01159797701984644, -0.0769338607788086, -0.06843829154968262, 0.030035551637411118, -0.01126143615692854, 0.033704351633787155, 0.10479138791561127, 0.04874657839536667, 0.12403382360935211, 0.03499756380915642, 0.056384023278951645, 0.08842378854751587, 0.1571013629436493, -0.02735857293009758, -0.009384694509208202, -0.08690360933542252, -0.0077023678459227085, -0.11491736024618149, 0.013589921407401562, -0.08676353842020035, -0.08097173273563385, -0.06712319701910019, -0.09289807826280594, 0.0853458121418953, 0.013065740466117859, -0.10764224827289581, 0.028928879648447037, -0.05041894316673279, 0.14233389496803284, -0.06865746527910233, 0.024987991899251938, -0.04815465956926346, -0.016611797735095024, 0.09392078220844269, -0.12928342819213867, -0.041283123195171356, 0.05096886307001114, 0.10454574972391129, -0.08771654218435287, -0.008690272457897663, -0.02773483470082283, -0.016515769064426422, 0.08196479082107544, -0.21887652575969696, 0.04890422895550728, -0.08924833685159683, -0.18882790207862854, 0.04947016015648842, -0.023826705291867256, -0.07208660989999771, 0.0384606197476387, 0.0034611704759299755, 0.03691590949892998, 0.017501259222626686, -0.0781235471367836, 0.11830198019742966, -0.0748995691537857, -0.00028748225304298103, 0.016243167221546173, 0.11621741205453873, -0.20102307200431824, -0.03239996358752251, -0.11622431129217148, 0.022076863795518875, -0.07349862903356552, 0.05271406099200249, -0.08628221601247787, 0.15516559779644012, -0.06482668966054916, -0.05989032983779907, -0.18276464939117432, 0.00686787161976099, -0.029583999887108803, 0.04771978035569191, -0.23435309529304504, -0.048334751278162, 0.13491742312908173, -0.11890623718500137, -0.17412534356117249, 0.06312576681375504, 0.005564803723245859, -0.08655105531215668, 0.04385194554924965, 0.3227510154247284, 0.028268739581108093, -0.09455440938472748, -0.008813520893454552, 0.13388009369373322, -0.1293894350528717, -0.28605666756629944, 0.056432776153087616, 
-0.02451152727007866, 0.037830326706171036, 0.0299998689442873, 0.13428188860416412, 0.076902374625206, -0.0455930158495903, -0.064296193420887, -0.05135151743888855, -0.13576805591583252, 0.011482254602015018, 0.014398453757166862, 0.04467162489891052, -0.07652752846479416, 0.1807379424571991, 0.108381487429142, 0.048272211104631424, 0.009413127787411213, 0.005654302891343832, 0.05406799912452698, 0.05238982290029526, -0.1631072610616684, -0.015523145906627178, -0.1124357134103775, -0.149803027510643, -0.0077763753943145275, 0.012617587111890316, 0.05199330300092697, 0.12292111665010452, 0.07311701774597168, -0.03130283206701279, -0.023130059242248535, 0.0781501978635788, 0.13438275456428528, 0.004077008925378323, -0.04429249092936516, 0.049459267407655716, 0.03181666508316994, -0.020234812051057816, -0.13701209425926208, -0.1392599642276764, -0.058788664638996124, 0.06450159102678299, 0.06930147111415863, 0.02069154754281044, -0.028206855058670044, 0.08743228018283844, -0.006187909748405218, 0.006859928835183382, -0.04200737923383713, 0.006905480287969112, -0.03286336734890938, -0.028330570086836815, 0.09700929373502731, -0.05843130126595497, 0.21348001062870026, 0.12471909821033478, -0.045022349804639816, 0.0698951929807663, -0.08688382059335709, 0.02385956048965454, -0.06137939542531967, -0.09260305017232895, 0.03182816132903099, -0.07517395913600922, -0.03356176242232323, 0.03671305626630783, -0.04195549711585045, -0.0038617269601672888, 0.042497336864471436, -0.041242241859436035, -0.05204840376973152, 0.05858984962105751, 0.06973826140165329, -0.2373158037662506, 0.11857717484235764, 0.16687366366386414, 0.07580869644880295, 0.2310853749513626, -0.07569364458322525, -0.10495942831039429, 0.03835373371839523, -0.08729595690965652, -0.08154388517141342, 0.1661539077758789, -0.06186964362859726, -0.027247484773397446, 0.08265922963619232, 0.03571917489171028, 0.07191870361566544, -0.0571531280875206, -0.07665897905826569, -0.015022419393062592, 
-0.007834721356630325, -0.08397741615772247, 0.03321533277630806, 0.01683487370610237, 0.02654399164021015, 0.038666632026433945, 0.05793532729148865, 0.0925564244389534, -0.01971798948943615, 0.01162105705589056, 0.10202205181121826, -0.11725115031003952, -0.23327337205410004, -0.09187190979719162, -0.10599266737699509, -0.01074140053242445, -0.008385816588997841, -0.047231197357177734, -0.14676031470298767, -0.06054993346333504, 0.035253893584012985, 0.026305073872208595, -0.14549894630908966, 0.06756176054477692, 0.00484716659411788, 0.009254321455955505, -0.05018070712685585, -0.07271526753902435, 0.024619800969958305, -0.07904958724975586, 0.12635250389575958, 0.08504976332187653, -0.05291960760951042, 0.13779160380363464, 0.11453002691268921, -0.020915135741233826, 0.0716220960021019, -0.0028967594262212515, 0.1079028993844986, -0.044559866189956665, -0.039072271436452866, 0.13384220004081726, 0.08881517499685287, -0.012679786421358585, -0.00359904277138412, 0.07438477128744125, -0.10231994092464447, -0.020172080025076866, 0.03690243884921074, -0.22003236413002014, -0.14586131274700165, -0.15269027650356293, -0.11863456666469574, 0.11523108184337616, 0.17365510761737823, 0.061939358711242676, -0.06846236437559128, 0.07081582397222519, 0.12042766064405441, 0.0067276014015078545, -0.14023087918758392, -0.006131678819656372, 0.043024901300668716, 0.051891833543777466, 0.028637785464525223, -0.15111565589904785, -0.006203815340995789, 0.14385341107845306, 0.1366269439458847, 0.19974841177463531, 0.006590340752154589, 0.120792917907238, -0.018534565344452858, 0.08235626667737961, 0.12005003541707993, 0.11477704346179962, 0.038164976984262466, -0.07565323263406754, 0.06426858901977539, -0.003494095057249069, -0.01512239221483469, -0.00641391659155488, 0.09181775152683258, -0.14419038593769073, 0.07002400606870651, -0.049134403467178345, 0.038178298622369766, -0.041950855404138565, 0.12455131113529205, -0.1430029720067978, 0.021217895671725273, 
-0.011195397935807705, 0.0994933694601059, -0.08607514202594757, 0.07956833392381668, 0.18360042572021484, -0.0363890714943409, 0.10257405787706375, 0.007404349744319916, 0.08697374910116196, -0.03407363221049309, -0.0015481604496017098, -0.08614324033260345, -0.014686995185911655, -0.0024414234794676304, 0.09177093952894211, -0.05806097015738487, 0.14455543458461761, 0.008048707619309425, -0.037981066852808, -0.09824800491333008, -0.10256259888410568, -0.035889770835638046, -0.03368376940488815, 0.0720367282629013, 0.05422072857618332, -0.11791522055864334, -0.13533642888069153, -0.15555378794670105, -0.0424337200820446, 0.09772010892629623, 0.018723612651228905, -0.09292840957641602, 0.06656738370656967, -0.019646501168608665, -0.044096194207668304, -0.19280923902988434, 0.07424692064523697, -0.10387011617422104, -0.04923040792346001, 0.13804924488067627, -0.24693699181079865, 0.027641961351037025, -0.005973908584564924, -0.0921834334731102, 0.17881198227405548, 0.12826403975486755, -0.05580335110425949, -0.07848935574293137, 0.015897734090685844, 0.2131086140871048, -0.021882355213165283, 0.08471948653459549, 0.02376564033329487, 0.05797123908996582, -0.01828623004257679, -0.18970395624637604, 0.08283789455890656, -0.053423378616571426, 0.08650225400924683, -0.02526325359940529, 0.14447283744812012, -0.04278280958533287, 0.023438552394509315, -0.013754012994468212, 0.04871032014489174, -0.07964913547039032, -0.10800385475158691, 0.06676680594682693, -0.08408645540475845, 0.11545030027627945, 0.22823718190193176, 0.1307952105998993, 0.07609404623508453, 0.1283230185508728, -0.02335316501557827, 0.1763271987438202, 0.07704800367355347, -0.052507124841213226, 0.18339252471923828, 0.07188627868890762, 0.035376403480768204, -0.27100443840026855, 0.030773494392633438, -0.12840017676353455, -0.011591264046728611, -0.022267494350671768, -0.12407935410737991, 0.2492714822292328, 0.14608240127563477, -0.018610311672091484, 0.28639405965805054, -0.16614457964897156, 
-0.049392469227313995, 0.10278481990098953, 0.004620368126779795, 0.38582906126976013, -0.059470806270837784, -0.07383524626493454, -0.09337737411260605, -0.1745292842388153, 0.19174525141716003, -0.12042224407196045, 0.025302326306700706, -0.03993658348917961, 0.08317705243825912, -0.001476145233027637, -0.07417690753936768, 0.15769635140895844, 0.06627439707517624, 0.07071192562580109, -0.07986040413379669, 0.032704971730709076, 0.1497483104467392, -0.06253296136856079, 0.10326482355594635, -0.08301691710948944, 0.013277070596814156, -0.10618861019611359, -0.03692781552672386, 0.0074638500809669495, -0.012296900153160095, 0.09418290108442307, -0.04431471601128578, -0.09222705662250519, -0.02642710693180561, -0.10403614491224289, 0.005265405867248774, 0.14819477498531342, 0.003306445199996233, -0.03431791067123413, 0.056349318474531174, 0.016709741204977036, -0.19328665733337402, -0.15043465793132782, -0.08823544532060623, -0.06744534522294998, 0.06257383525371552, -0.18944096565246582, 0.03515252098441124, 0.10108055174350739, 0.052569303661584854, -0.06355172395706177, 0.04208878427743912, -0.007322357036173344, 0.01557349506765604, 0.1576468050479889, -0.10687188059091568, -0.020445019006729126, 0.06002768129110336, -0.09402074664831161, -0.013107308186590672, 0.05010032653808594, -0.0005998504930175841, 0.04442287236452103, 0.01833728887140751, -0.031916458159685135, 0.01946454681456089, -0.0644284263253212, 0.14785410463809967, 0.06403452903032303, 0.07373244315385818, -0.19386012852191925, 0.16293494403362274, -0.014349152334034443, 0.015559571795165539, -0.009674199856817722, 0.12565912306308746, -0.09050639718770981, -0.09091950207948685, 0.019739210605621338, 0.098020538687706, -0.10669296979904175, -0.06963568180799484, 0.04323267191648483, -0.08116282522678375, 0.01774556003510952, 0.03943708539009094, 0.0558590292930603, 0.05784374475479126, -0.013856738805770874, -0.06105291098356247, -0.04930003732442856, -0.05893943831324577, -0.04438585788011551, 
0.08054665476083755, -0.08707405626773834, -0.05412731692194939, -0.060995765030384064, 0.15948019921779633, -0.07858484983444214, -0.014834518544375896, -0.06400767713785172, 0.04245183989405632, -0.10601847618818283, 0.03683433309197426, -0.0038505580741912127, 0.018812259659171104, -0.04835308715701103, 0.05001314356923103, -0.04368835687637329, -0.04010573402047157, -0.09482056647539139, 0.03842005878686905, 0.018245890736579895, -0.04637523740530014, -0.04630523920059204, -0.037709422409534454, 0.056264426559209824, 0.03199607878923416, 0.0836702212691307, 0.10161715745925903, 0.04138265177607536, 0.10725148022174835, 0.007786483969539404, -0.1104029044508934, 0.15793082118034363, 0.07484710961580276, 0.07967766374349594, 0.13827607035636902, -0.02640692889690399, 0.0965157225728035, 0.08208473771810532, 0.09136661142110825, -0.16223406791687012, -0.084060437977314, -0.07061173766851425, -0.19111551344394684, -0.09751049429178238, 0.015581744723021984, -0.08479791134595871, 0.14573989808559418, 0.07907704263925552, -0.012767836451530457, 0.02046254836022854, 0.018784090876579285, -0.04612698778510094, -0.0459086112678051, -0.0427272729575634, -0.15450572967529297, 0.03318537399172783, 0.06377892941236496, 0.06751345843076706, -0.038922470062971115, 0.4011092185974121, -0.028223101049661636, -0.1686401516199112, -0.03736501559615135, 0.11536799371242523, -0.021494699642062187, 0.00536826578900218, 0.43947678804397583, 0.09869784861803055, -0.05280883237719536, -0.007073820102959871, 0.12342531234025955, 0.07130032032728195, 0.2261894792318344, 0.13315095007419586, 0.08591751009225845, -0.015319467522203922, 0.0088001424446702, 0.012730211019515991, -0.07831139862537384, -0.013258845545351505, 0.08825717121362686, -0.08642124384641647, 0.06377347558736801, 0.047836728394031525, -0.03822920843958855, 0.07243667542934418, -0.06594330817461014, -0.028813334181904793, 0.003124524373561144, -0.058567922562360764, -0.09574846923351288, -0.12393065541982651, 
-0.041329316794872284, -0.06287486106157303, 0.03763359785079956, -0.04900356009602547, 0.0013041032943874598, 0.14481306076049805, 0.06200549006462097, 0.019283771514892578, 0.13055719435214996, 0.0023254912812262774, 0.05615512654185295, 0.05333717539906502, 0.007545190397650003, -0.07842455059289932, 0.09546114504337311, -0.005265559535473585, 0.04935634881258011, -0.07149092853069305, -0.024025773629546165, -0.06218006834387779, 0.043413154780864716, 0.04285730794072151, -0.08186870068311691, -0.07431205362081528, -0.0733768567442894, 0.07680356502532959, -0.010668952949345112, 0.07008049637079239, 0.08343004435300827, -0.011811197735369205, -0.020390039309859276, 0.1598077118396759, -0.027591420337557793, -0.050350870937108994, -0.0007199437241069973, -0.03310632333159447, -0.035710688680410385, 0.06186152249574661, -0.08570591360330582, -0.050449859350919724, -0.04275371506810188, 0.15503151714801788, 0.24712884426116943, -0.053292471915483475, 0.0005988742341287434, -0.04204491153359413, 0.02682451345026493, 0.009343231096863747, 0.1564740389585495, 0.034667328000068665, 0.13985756039619446, -0.003618745831772685, -0.08917073905467987, 0.00871189683675766, -0.039803050458431244, -0.1333191692829132, -0.016023559495806694, 0.03749878332018852, -0.050632577389478683, -0.10722695291042328, 0.08044379204511642, -0.12496548146009445, 0.18553614616394043, 0.09438935667276382, -0.16210351884365082, -0.0845060870051384, -0.0026738441083580256, 0.033670924603939056, 0.010973685421049595, 0.012911728583276272, -0.10919014364480972, 0.013001806102693081, -0.15411339700222015, -0.025982720777392387, -0.3529346287250519, -0.14664003252983093, -0.00950052309781313, 0.1098439171910286, 0.06980977952480316, -0.004411355592310429, 0.05298835039138794, 0.031807657331228256, 0.021114720031619072, -0.11339700222015381, 0.06478334963321686, -0.05682997778058052, -0.020343217998743057, -0.03160180151462555, 0.0778246745467186, -0.06174951791763306, -0.15548576414585114, 
0.08983232080936432, 0.0022727216128259897, -0.12779711186885834, 0.05348917469382286, 0.07733543962240219, -0.00041953034815378487, -0.04686075821518898, -0.06608975678682327, 0.13421960175037384, -0.010108628310263157, -0.0249636173248291, -0.03057892434298992, 0.028816815465688705, 0.08016852289438248, -0.00920284353196621, -0.13732706010341644, -0.08409599214792252, -0.0031258265953511, -0.11567439138889313, 0.060722049325704575, -0.011253324337303638, -0.18004319071769714, 0.017336564138531685, -0.11605315655469894, 0.05717584118247032, -0.0061719706282019615, 0.059151940047740936, 0.09807540476322174, 0.021015211939811707, -0.022931652143597603, -0.08020976930856705, 0.06640088558197021, -0.018346799537539482, -0.05630374327301979, -0.10530036687850952 ]
e8ced1e9de9607a06780fcdff4a6a93c0a356ed5
# Dataset Card for "skewlognormal_minlength" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Asap7772/skewlognormal_minlength
[ "region:us" ]
2024-01-25T07:38:20+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "output", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "alpaca_text", "dtype": "string"}, {"name": "prompt", "dtype": "string"}, {"name": "alpaca_prompt", "dtype": "string"}, {"name": "y_ref", "dtype": "string"}, {"name": "y_1", "dtype": "string"}, {"name": "y_2", "dtype": "string"}, {"name": "y_w", "dtype": "string"}, {"name": "y_w_alpaca", "dtype": "string"}, {"name": "y_l", "dtype": "string"}, {"name": "y_l_alpaca", "dtype": "string"}, {"name": "y_w_score", "dtype": "float64"}, {"name": "y_l_score", "dtype": "float64"}, {"name": "score_diff", "dtype": "float64"}], "splits": [{"name": "train", "num_bytes": 77844991, "num_examples": 19000}, {"name": "test", "num_bytes": 4082779, "num_examples": 1000}], "download_size": 40225094, "dataset_size": 81927770}}
2024-01-25T07:38:24+00:00
[]
[]
TAGS #region-us
# Dataset Card for "skewlognormal_minlength" More Information needed
[ "# Dataset Card for \"skewlognormal_minlength\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"skewlognormal_minlength\"\n\nMore Information needed" ]
[ 6, 18 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"skewlognormal_minlength\"\n\nMore Information needed" ]
[ -0.0835992768406868, 0.13321241736412048, -0.003932690713554621, -0.0035192423965781927, 0.03668644651770592, 0.0384204126894474, 0.10873758792877197, 0.06722711771726608, 0.04716544598340988, 0.016360806301236153, 0.15619075298309326, -0.026309998705983162, 0.0291066262871027, 0.16556790471076965, -0.05095263570547104, -0.1628110557794571, 0.034203335642814636, 0.06317871809005737, -0.04125151038169861, 0.03771509602665901, -0.00566513417288661, -0.11266190558671951, 0.06992931663990021, -0.1116754561662674, -0.20315302908420563, 0.031608037650585175, -0.042512595653533936, -0.07824587821960449, 0.036574918776750565, -0.06569226086139679, 0.0860372930765152, -0.04306168109178543, -0.0023321849294006824, -0.10145074874162674, 0.019544679671525955, -0.002814691746607423, 0.0244020763784647, 0.06100840121507645, 0.02475236915051937, -0.008104652166366577, -0.01682606153190136, -0.09034669399261475, 0.014211677946150303, -0.034728024154901505, -0.08733750879764557, -0.10819817334413528, -0.060659442096948624, -0.020159319043159485, 0.0398077555000782, 0.02606676146388054, 0.03637256100773811, 0.12513896822929382, -0.1447516828775406, 0.03541697934269905, 0.13304850459098816, -0.06256978213787079, -0.01304153073579073, 0.20604713261127472, -0.0629933550953865, 0.1145176813006401, -0.08297823369503021, 0.1108977273106575, 0.09711837023496628, -0.009019172750413418, -0.001437411759980023, -0.06161930039525032, -0.05172943323850632, 0.10322002321481705, -0.021484019234776497, -0.03847147151827812, 0.1837318241596222, 0.01075111422687769, 0.055731434375047684, -0.04956350103020668, -0.01072667259722948, -0.08342958986759186, -0.04231814295053482, 0.13289710879325867, 0.03607403486967087, -0.008110403083264828, -0.024929868057370186, -0.06486660242080688, -0.09297514706850052, -0.10363788157701492, -0.17904677987098694, 0.12527327239513397, -0.03605646267533302, 0.1584545522928238, -0.16954565048217773, -0.038519442081451416, -0.06725303828716278, -0.050069477409124374, 
-0.009043262340128422, -0.06497835367918015, -0.006153903901576996, 0.030707431957125664, -0.015742089599370956, 0.032082561403512955, 0.1163581982254982, 0.07274620980024338, 0.11038712412118912, 0.05210807919502258, 0.04070931673049927, 0.08399561792612076, 0.12495294958353043, 0.0031014084815979004, 0.018513010814785957, -0.07316507399082184, -0.01276395469903946, -0.11339537054300308, 0.035813357681035995, -0.10565531253814697, -0.08426525443792343, -0.0687737762928009, -0.08180500566959381, 0.08827441930770874, 0.020382633432745934, -0.10445535182952881, 0.030507097020745277, -0.041750065982341766, 0.11307557672262192, -0.06969980150461197, 0.03692576661705971, -0.05350859835743904, -0.023483717814087868, 0.0845024436712265, -0.10711264610290527, -0.030937986448407173, 0.041421856731176376, 0.11827407777309418, -0.08757071942090988, 0.0027392751071602106, -0.02581198886036873, -0.030259868130087852, 0.07040812075138092, -0.21733883023262024, 0.0689738392829895, -0.1043122336268425, -0.17210660874843597, 0.052952323108911514, -0.029764873906970024, -0.06656419485807419, 0.07665269821882248, -0.028850099071860313, 0.021009812131524086, 0.014323629438877106, -0.06532789766788483, 0.10961632430553436, -0.06764648109674454, 0.004748221952468157, -0.006551612168550491, 0.12218274921178818, -0.21799339354038239, -0.01436575222760439, -0.1207558736205101, 0.013935002498328686, -0.049580227583646774, 0.05823326110839844, -0.11127077043056488, 0.1401512771844864, -0.038835685700178146, -0.05147598311305046, -0.19319915771484375, 0.007639497518539429, -0.01308976300060749, 0.056649960577487946, -0.2026011049747467, -0.0588819682598114, 0.151453897356987, -0.12226947396993637, -0.11374898999929428, 0.05383191630244255, 0.010608782060444355, -0.0834747776389122, 0.04124823957681656, 0.3908448815345764, 0.08243075758218765, -0.08711837977170944, 0.048914384096860886, 0.15330156683921814, -0.12659282982349396, -0.2732674181461334, 0.035596344619989395, -0.028513388708233833, 
-0.012256995774805546, 0.041168756783008575, 0.1258479803800583, 0.07081229984760284, -0.06910370290279388, -0.06222958117723465, -0.026855159550905228, -0.1228623017668724, 0.013456993736326694, 0.03258682042360306, 0.0488731749355793, -0.0801997035741806, 0.19518661499023438, 0.08586446195840836, 0.06303990632295609, -0.00047140568494796753, 0.003388437209650874, 0.04593253135681152, 0.07307349145412445, -0.1046779528260231, -0.01921171322464943, -0.11968696117401123, -0.15207648277282715, -0.012282689101994038, 0.005534708499908447, 0.06199177727103233, 0.09697430580854416, 0.07213103771209717, -0.021206388249993324, -0.0068875765427947044, 0.08293025940656662, 0.10873206704854965, 0.01629643514752388, -0.05638263002038002, 0.055871494114398956, 0.06709209084510803, -0.030100200325250626, -0.09370802342891693, -0.09653173387050629, -0.056305162608623505, 0.0612526461482048, 0.06129920482635498, 0.025258051231503487, -0.021895544603466988, 0.07788507640361786, 0.007978413254022598, 0.025395283475518227, -0.03850409388542175, 0.004228842910379171, -0.027610106393694878, -0.013649197295308113, 0.0712117999792099, -0.06395727396011353, 0.16635465621948242, 0.14106394350528717, -0.04131218418478966, 0.0773313045501709, -0.1312919706106186, 0.03419443219900131, -0.04660724475979805, -0.09729838371276855, 0.032191090285778046, -0.06311195343732834, -0.04250266030430794, 0.03932669013738632, -0.04789566993713379, 0.016768096014857292, 0.0268957931548357, -0.07563953846693039, -0.06589656323194504, 0.07809925079345703, 0.05548159033060074, -0.20277874171733856, 0.12991781532764435, 0.1508999615907669, 0.06603420525789261, 0.22814051806926727, -0.08181388676166534, -0.10526025295257568, 0.041089631617069244, -0.09564274549484253, -0.07560788840055466, 0.1653698831796646, -0.07987113296985626, -0.025926027446985245, 0.08960060030221939, 0.03852960839867592, 0.08645720779895782, -0.08169577270746231, -0.08297079056501389, -0.01656796969473362, -0.010933225043118, 
-0.1031809002161026, 0.02797001414000988, 0.01448600459843874, 0.029557572677731514, 0.03356018289923668, 0.036907173693180084, 0.07105576246976852, -0.014396533370018005, 0.013680254109203815, 0.10072682052850723, -0.15437480807304382, -0.23554286360740662, -0.12063656747341156, -0.09277410805225372, -0.001879270887002349, -0.007786355912685394, -0.025412559509277344, -0.14744268357753754, -0.057874348014593124, 0.013345323503017426, 0.03893755003809929, -0.12795354425907135, 0.05527668818831444, 0.02348092757165432, 0.008830110542476177, -0.0934949591755867, -0.0863136574625969, 0.010779899545013905, -0.0905635803937912, 0.18513382971286774, 0.11534424871206284, -0.08963774144649506, 0.1249924898147583, 0.12688115239143372, -0.03155915439128876, 0.079855777323246, -0.009170972742140293, 0.12891462445259094, -0.0270286463201046, -0.03784971684217453, 0.11836674064397812, 0.07987324148416519, 0.010974847711622715, 0.007261316757649183, 0.06856556236743927, -0.11108576506376266, -0.029443619772791862, 0.0310602355748415, -0.20606109499931335, -0.13337312638759613, -0.14908646047115326, -0.09446640312671661, 0.1474306434392929, 0.13241922855377197, 0.0470876581966877, -0.041122522205114365, 0.07160275429487228, 0.12584443390369415, -0.011258826591074467, -0.10710634291172028, -0.011362836696207523, 0.09411486238241196, 0.020559130236506462, 0.022037705406546593, -0.1363658756017685, 0.0029703506734222174, 0.1568748503923416, 0.1084529384970665, 0.20312397181987762, 0.031546056270599365, 0.09925997257232666, -0.02537078596651554, 0.09523101896047592, 0.1236780509352684, 0.09396068006753922, 0.0658988282084465, -0.07232417166233063, 0.05457063764333725, 0.011531590484082699, -0.037885330617427826, -0.01936654932796955, 0.08126617968082428, -0.15144094824790955, 0.07185441255569458, 0.014086393639445305, 0.020048147067427635, -0.0548400953412056, 0.12650148570537567, -0.14704854786396027, 0.014064501039683819, -0.016820501536130905, 0.09592678397893906, 
-0.08578359335660934, 0.0888100191950798, 0.19254295527935028, -0.03111198917031288, 0.060701336711645126, 0.009497418999671936, 0.0945364236831665, -0.0466911718249321, -0.002502910792827606, -0.09995505958795547, -0.01932908408343792, -0.00677291676402092, 0.0833725854754448, -0.057804763317108154, 0.1766955405473709, 0.0051188478246331215, -0.04597172141075134, -0.0904264822602272, -0.09068489074707031, -0.035296306014060974, 0.005156031809747219, 0.06307341903448105, 0.05202588811516762, -0.13149823248386383, -0.17665508389472961, -0.19048111140727997, -0.04735834524035454, 0.14443834125995636, 0.014883546158671379, -0.09647434204816818, 0.038436781615018845, -0.0008843018440529704, -0.039029091596603394, -0.26183900237083435, 0.07631701231002808, -0.10155696421861649, -0.043664753437042236, 0.13367711007595062, -0.24324919283390045, 0.021662143990397453, 0.0025033142883330584, -0.0687679648399353, 0.11484768241643906, 0.14956294000148773, -0.06430798768997192, -0.09056142717599869, 0.02996530942618847, 0.1870482712984085, -0.037321023643016815, 0.08796656131744385, 0.03175678849220276, 0.03023364581167698, -0.007299472112208605, -0.18541166186332703, 0.07721306383609772, -0.058768246322870255, 0.10127519071102142, -0.022360825911164284, 0.13512849807739258, -0.042803805321455, 0.02137349545955658, -0.00909764040261507, 0.038905564695596695, -0.08927703648805618, -0.10848423093557358, 0.06791751831769943, -0.059480659663677216, 0.09749727696180344, 0.20198872685432434, 0.10894428193569183, 0.03868088498711586, 0.10879124701023102, -0.017604701220989227, 0.16793827712535858, 0.07251375168561935, -0.05294174700975418, 0.1760929375886917, 0.1190011128783226, 0.04026436060667038, -0.2975671589374542, 0.02601933851838112, -0.1284322440624237, -0.01653759554028511, 0.004074490629136562, -0.12439560890197754, 0.22860990464687347, 0.14250627160072327, -0.007565306965261698, 0.23411925137043, -0.21422593295574188, -0.044742487370967865, 0.13958404958248138, 
0.03710531070828438, 0.3528631627559662, -0.060303859412670135, -0.08959074318408966, -0.06267295032739639, -0.1980130821466446, 0.15709586441516876, -0.11792127043008804, 0.023725416511297226, -0.017152482643723488, 0.127968430519104, 0.013785467483103275, -0.058468762785196304, 0.17669463157653809, 0.05443446710705757, 0.09266169369220734, -0.08871221542358398, -0.026484409347176552, 0.13710607588291168, -0.0555993989109993, 0.08382399380207062, -0.035870831459760666, 0.04371356591582298, -0.061393823474645615, -0.02650347724556923, 0.00613339152187109, -0.016700346022844315, 0.08883804827928543, -0.061779629439115524, -0.0942215695977211, -0.031388312578201294, -0.12383931875228882, -0.009078790433704853, 0.14397594332695007, -0.006538331974297762, -0.08830006420612335, 0.04778536409139633, -0.0005239912425167859, -0.18245720863342285, -0.07538063079118729, -0.09629286825656891, -0.06791198253631592, 0.05354870483279228, -0.15631768107414246, 0.018796784803271294, 0.08747048676013947, 0.05534522607922554, -0.04314751550555229, 0.022249121218919754, 0.016202827915549278, 0.010371850803494453, 0.1351398527622223, -0.09899500757455826, -0.05460477992892265, 0.05076253041625023, -0.11670880019664764, 0.015859611332416534, 0.04295212775468826, -0.006753029767423868, 0.0605769157409668, 0.019245723262429237, -0.03585674241185188, 0.03125424310564995, -0.05391324311494827, 0.13193579018115997, 0.0782279521226883, 0.04319965839385986, -0.19732165336608887, 0.176283597946167, -0.017087029293179512, -0.0024463492445647717, -0.013808599673211575, 0.1263376772403717, -0.11023785173892975, -0.0899820327758789, 0.001967713236808777, 0.14282581210136414, -0.11803656071424484, -0.07455502450466156, 0.04272351786494255, -0.0674046203494072, 0.009254565462470055, 0.050991084426641464, 0.04777941480278969, 0.05458464100956917, -0.017500964924693108, -0.029525594785809517, -0.03400949761271477, -0.040048446506261826, -0.027757108211517334, 0.07261963188648224, -0.0685105174779892, 
-0.04781505838036537, -0.0651143342256546, 0.14734938740730286, -0.0738987997174263, -0.02631421759724617, -0.07254322618246078, 0.05336785688996315, -0.11350291967391968, 0.055883895605802536, -0.011637374758720398, 0.024123618379235268, -0.020496491342782974, 0.030032696202397346, -0.03433777764439583, -0.0324421189725399, -0.08080131560564041, 0.048160552978515625, 0.002963349223136902, -0.03648128733038902, -0.05457872897386551, -0.05013246089220047, 0.06513508409261703, 0.04315998777747154, 0.08192016184329987, 0.13052085041999817, 0.05256180092692375, 0.10593008995056152, 0.002228048164397478, -0.10112587362527847, 0.1527097374200821, 0.07151542603969574, 0.07986872643232346, 0.11751142889261246, -0.05259837582707405, 0.07493039220571518, 0.0589536614716053, 0.09270171821117401, -0.09208699315786362, -0.08225513994693756, -0.052474524825811386, -0.1884397715330124, -0.12786336243152618, 0.00048527008038945496, -0.06554610282182693, 0.1648692786693573, 0.05656413361430168, -0.02345249243080616, 0.02367677167057991, 0.02857459895312786, -0.013161872513592243, -0.04402315989136696, -0.027258310467004776, -0.16270530223846436, 0.01628846488893032, 0.0431223139166832, 0.062392737716436386, -0.04623192921280861, 0.33205264806747437, 0.010925290174782276, -0.1771344244480133, -0.03244348615407944, 0.11031683534383774, -0.025667326524853706, 0.01217047031968832, 0.4168650209903717, 0.11601945012807846, -0.06188533455133438, -0.021821608766913414, 0.11338172852993011, 0.06603693217039108, 0.21045176684856415, 0.1358659416437149, 0.10626782476902008, -0.014038853347301483, 0.023702451959252357, 0.00018400060071144253, -0.033954281359910965, -0.08281121402978897, 0.10172798484563828, -0.07956130802631378, 0.04872184246778488, 0.03187689557671547, -0.044831547886133194, 0.08028839528560638, -0.07766658812761307, -0.00875911582261324, 0.026684720069169998, -0.07262516021728516, -0.09458727389574051, -0.1707126349210739, -0.04715913534164429, -0.06651411950588226, 
0.019827069714665413, -0.06032257154583931, 0.02167665772140026, 0.14581336081027985, 0.056673306971788406, 0.0311212707310915, 0.15067623555660248, -0.053142011165618896, 0.04247652366757393, 0.049566660076379776, -0.00010842616757145151, -0.07484511286020279, 0.039073195308446884, -0.03357017785310745, 0.04485711082816124, -0.056210946291685104, -0.020513450726866722, -0.055257029831409454, 0.04627809301018715, 0.018981149420142174, -0.11826203763484955, -0.08044154196977615, -0.0697651356458664, 0.06302350014448166, -0.027436133474111557, 0.04158717766404152, 0.0796644538640976, -0.0026584321167320013, -0.0016671003540977836, 0.19347651302814484, -0.032696716487407684, -0.07438678294420242, -0.013480964116752148, -0.0312061570584774, -0.047996699810028076, 0.08593755215406418, -0.075137197971344, -0.06514101475477219, -0.029844967648386955, 0.18449322879314423, 0.2211238443851471, -0.04667460173368454, -0.00011779554188251495, -0.012256820686161518, 0.029014715924859047, 0.003576524555683136, 0.141503244638443, 0.0273971538990736, 0.09758656471967697, 0.002474012551829219, -0.133798748254776, 0.005589172709733248, -0.07028813660144806, -0.148759126663208, -0.011902211233973503, 0.04400015249848366, -0.07043588161468506, -0.1294986754655838, 0.08764123916625977, -0.1261400431394577, 0.18874430656433105, 0.10433507710695267, -0.1513601392507553, -0.08036714792251587, 0.009867866523563862, 0.04141828045248985, -0.00781860388815403, 0.028940485790371895, -0.0936184972524643, -0.0015144007047638297, -0.10516034066677094, -0.02115066535770893, -0.33588314056396484, -0.1323501616716385, 0.00599448150023818, 0.07170528173446655, 0.06902774423360825, -0.01178461592644453, 0.03367890417575836, 0.024556998163461685, 0.041687462478876114, -0.0897039994597435, 0.07648299634456635, -0.02348659373819828, 0.019487988203763962, -0.0478346087038517, 0.06787647306919098, -0.07180474698543549, -0.13882185518741608, 0.09949377924203873, -0.016967663541436195, -0.1185266301035881, 
0.044146906584501266, 0.06608401238918304, 0.018786653876304626, -0.05767581984400749, -0.06885480880737305, 0.1389017403125763, -0.027130454778671265, -0.004766007419675589, -0.033801231533288956, 0.042474955320358276, 0.05172622948884964, -0.01680532842874527, -0.11993495374917984, -0.07778026163578033, 0.017743242904543877, -0.13737840950489044, 0.0517527274787426, 0.0030525645706802607, -0.15676911175251007, 0.022904368117451668, -0.11670316755771637, 0.06639550626277924, 0.005666651763021946, 0.06338687986135483, 0.12573044002056122, -0.004613853059709072, -0.029975438490509987, -0.06849990040063858, 0.07428041845560074, 0.011144587770104408, -0.03852347657084465, -0.10003548115491867 ]
227811efddfa85ec7d5d246738f5558521e6ab82
# Dataset Card for Evaluation run of zhengr/MixTAO-7Bx2-MoE-Instruct-v6.0 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [zhengr/MixTAO-7Bx2-MoE-Instruct-v6.0](https://huggingface.co/zhengr/MixTAO-7Bx2-MoE-Instruct-v6.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_zhengr__MixTAO-7Bx2-MoE-Instruct-v6.0", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T07:43:54.051731](https://huggingface.co/datasets/open-llm-leaderboard/details_zhengr__MixTAO-7Bx2-MoE-Instruct-v6.0/blob/main/results_2024-01-25T07-43-54.051731.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6533190243240277, "acc_stderr": 0.032078440041778726, "acc_norm": 0.6517191668467992, "acc_norm_stderr": 0.03277441388111557, "mc1": 0.5850673194614443, "mc1_stderr": 0.017248314465805978, "mc2": 0.7044948614297283, "mc2_stderr": 0.015034716941280082 }, "harness|arc:challenge|25": { "acc": 0.7090443686006825, "acc_stderr": 0.013273077865907588, "acc_norm": 0.7337883959044369, "acc_norm_stderr": 0.012915774781523205 }, "harness|hellaswag|10": { "acc": 0.7341167098187612, "acc_stderr": 0.004408994868650098, "acc_norm": 0.890161322445728, "acc_norm_stderr": 0.003120495238827556 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6296296296296297, "acc_stderr": 0.041716541613545426, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.041716541613545426 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6973684210526315, "acc_stderr": 0.03738520676119669, "acc_norm": 0.6973684210526315, "acc_norm_stderr": 0.03738520676119669 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7245283018867924, "acc_stderr": 0.027495663683724057, "acc_norm": 0.7245283018867924, "acc_norm_stderr": 0.027495663683724057 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 
0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736411, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736411 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4215686274509804, "acc_stderr": 0.04913595201274498, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.04913595201274498 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816506, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.574468085106383, "acc_stderr": 0.03232146916224468, "acc_norm": 0.574468085106383, "acc_norm_stderr": 0.03232146916224468 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.47368421052631576, "acc_stderr": 0.046970851366478626, "acc_norm": 0.47368421052631576, "acc_norm_stderr": 0.046970851366478626 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5655172413793104, "acc_stderr": 0.04130740879555498, "acc_norm": 0.5655172413793104, "acc_norm_stderr": 0.04130740879555498 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41005291005291006, "acc_stderr": 0.025331202438944423, "acc_norm": 0.41005291005291006, "acc_norm_stderr": 0.025331202438944423 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7967741935483871, "acc_stderr": 0.02289168798455496, "acc_norm": 0.7967741935483871, "acc_norm_stderr": 0.02289168798455496 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5073891625615764, "acc_stderr": 0.035176035403610105, "acc_norm": 0.5073891625615764, "acc_norm_stderr": 0.035176035403610105 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.803030303030303, "acc_stderr": 0.028335609732463362, "acc_norm": 0.803030303030303, "acc_norm_stderr": 0.028335609732463362 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.021500249576033456, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.021500249576033456 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6743589743589744, "acc_stderr": 0.02375966576741229, "acc_norm": 0.6743589743589744, "acc_norm_stderr": 0.02375966576741229 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32222222222222224, "acc_stderr": 0.028493465091028593, "acc_norm": 0.32222222222222224, "acc_norm_stderr": 0.028493465091028593 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6722689075630253, "acc_stderr": 0.03048991141767323, "acc_norm": 0.6722689075630253, "acc_norm_stderr": 0.03048991141767323 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 0.038615575462551684, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.038615575462551684 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8422018348623853, "acc_stderr": 0.015630022970092434, "acc_norm": 0.8422018348623853, "acc_norm_stderr": 0.015630022970092434 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5231481481481481, "acc_stderr": 
0.03406315360711507, "acc_norm": 0.5231481481481481, "acc_norm_stderr": 0.03406315360711507 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8480392156862745, "acc_stderr": 0.025195658428931792, "acc_norm": 0.8480392156862745, "acc_norm_stderr": 0.025195658428931792 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7890295358649789, "acc_stderr": 0.026558372502661916, "acc_norm": 0.7890295358649789, "acc_norm_stderr": 0.026558372502661916 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7709923664122137, "acc_stderr": 0.036853466317118506, "acc_norm": 0.7709923664122137, "acc_norm_stderr": 0.036853466317118506 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228732, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228732 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.0401910747255735, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.0401910747255735 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7791411042944786, "acc_stderr": 0.03259177392742178, "acc_norm": 0.7791411042944786, "acc_norm_stderr": 0.03259177392742178 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010212, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010212 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8888888888888888, "acc_stderr": 0.020588491316092368, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.020588491316092368 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 
0.04688261722621504, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8237547892720306, "acc_stderr": 0.013625556907993459, "acc_norm": 0.8237547892720306, "acc_norm_stderr": 0.013625556907993459 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7369942196531792, "acc_stderr": 0.02370309952525817, "acc_norm": 0.7369942196531792, "acc_norm_stderr": 0.02370309952525817 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4480446927374302, "acc_stderr": 0.016631976628930595, "acc_norm": 0.4480446927374302, "acc_norm_stderr": 0.016631976628930595 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7189542483660131, "acc_stderr": 0.025738854797818733, "acc_norm": 0.7189542483660131, "acc_norm_stderr": 0.025738854797818733 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7106109324758842, "acc_stderr": 0.025755865922632945, "acc_norm": 0.7106109324758842, "acc_norm_stderr": 0.025755865922632945 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7469135802469136, "acc_stderr": 0.024191808600712995, "acc_norm": 0.7469135802469136, "acc_norm_stderr": 0.024191808600712995 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.49645390070921985, "acc_stderr": 0.02982674915328092, "acc_norm": 0.49645390070921985, "acc_norm_stderr": 0.02982674915328092 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46936114732724904, "acc_stderr": 0.012746237711716634, "acc_norm": 0.46936114732724904, "acc_norm_stderr": 0.012746237711716634 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6764705882352942, "acc_stderr": 0.02841820861940676, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.02841820861940676 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6633986928104575, "acc_stderr": 0.019117213911495148, "acc_norm": 0.6633986928104575, "acc_norm_stderr": 0.019117213911495148 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 
0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.746938775510204, "acc_stderr": 0.027833023871399673, "acc_norm": 0.746938775510204, "acc_norm_stderr": 0.027833023871399673 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8308457711442786, "acc_stderr": 0.026508590656233268, "acc_norm": 0.8308457711442786, "acc_norm_stderr": 0.026508590656233268 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.03588702812826371, "acc_norm": 0.85, "acc_norm_stderr": 0.03588702812826371 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.03869543323472101, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.029170885500727665, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.029170885500727665 }, "harness|truthfulqa:mc|0": { "mc1": 0.5850673194614443, "mc1_stderr": 0.017248314465805978, "mc2": 0.7044948614297283, "mc2_stderr": 0.015034716941280082 }, "harness|winogrande|5": { "acc": 0.8926598263614838, "acc_stderr": 0.008699760340699001 }, "harness|gsm8k|5": { "acc": 0.689158453373768, "acc_stderr": 0.012748860507777723 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_zhengr__MixTAO-7Bx2-MoE-Instruct-v6.0
[ "region:us" ]
2024-01-25T07:46:08+00:00
{"pretty_name": "Evaluation run of zhengr/MixTAO-7Bx2-MoE-Instruct-v6.0", "dataset_summary": "Dataset automatically created during the evaluation run of model [zhengr/MixTAO-7Bx2-MoE-Instruct-v6.0](https://huggingface.co/zhengr/MixTAO-7Bx2-MoE-Instruct-v6.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_zhengr__MixTAO-7Bx2-MoE-Instruct-v6.0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T07:43:54.051731](https://huggingface.co/datasets/open-llm-leaderboard/details_zhengr__MixTAO-7Bx2-MoE-Instruct-v6.0/blob/main/results_2024-01-25T07-43-54.051731.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6533190243240277,\n \"acc_stderr\": 0.032078440041778726,\n \"acc_norm\": 0.6517191668467992,\n \"acc_norm_stderr\": 0.03277441388111557,\n \"mc1\": 0.5850673194614443,\n \"mc1_stderr\": 0.017248314465805978,\n \"mc2\": 0.7044948614297283,\n \"mc2_stderr\": 0.015034716941280082\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7090443686006825,\n \"acc_stderr\": 0.013273077865907588,\n \"acc_norm\": 0.7337883959044369,\n \"acc_norm_stderr\": 0.012915774781523205\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7341167098187612,\n \"acc_stderr\": 0.004408994868650098,\n \"acc_norm\": 0.890161322445728,\n \"acc_norm_stderr\": 0.003120495238827556\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6296296296296297,\n \"acc_stderr\": 0.041716541613545426,\n \"acc_norm\": 0.6296296296296297,\n \"acc_norm_stderr\": 0.041716541613545426\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.03738520676119669,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.03738520676119669\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7245283018867924,\n \"acc_stderr\": 0.027495663683724057,\n \"acc_norm\": 0.7245283018867924,\n \"acc_norm_stderr\": 0.027495663683724057\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.5,\n 
\"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736411,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736411\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.574468085106383,\n \"acc_stderr\": 0.03232146916224468,\n \"acc_norm\": 0.574468085106383,\n \"acc_norm_stderr\": 0.03232146916224468\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.47368421052631576,\n \"acc_stderr\": 0.046970851366478626,\n \"acc_norm\": 0.47368421052631576,\n \"acc_norm_stderr\": 0.046970851366478626\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555498,\n \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555498\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41005291005291006,\n \"acc_stderr\": 0.025331202438944423,\n \"acc_norm\": 0.41005291005291006,\n \"acc_norm_stderr\": 0.025331202438944423\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 
0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411019,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7967741935483871,\n \"acc_stderr\": 0.02289168798455496,\n \"acc_norm\": 0.7967741935483871,\n \"acc_norm_stderr\": 0.02289168798455496\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.035176035403610105,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.035176035403610105\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.803030303030303,\n \"acc_stderr\": 0.028335609732463362,\n \"acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.028335609732463362\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033456,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.021500249576033456\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6743589743589744,\n \"acc_stderr\": 0.02375966576741229,\n \"acc_norm\": 0.6743589743589744,\n \"acc_norm_stderr\": 0.02375966576741229\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32222222222222224,\n \"acc_stderr\": 0.028493465091028593,\n \"acc_norm\": 0.32222222222222224,\n \"acc_norm_stderr\": 0.028493465091028593\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6722689075630253,\n \"acc_stderr\": 0.03048991141767323,\n \"acc_norm\": 0.6722689075630253,\n \"acc_norm_stderr\": 0.03048991141767323\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.038615575462551684,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.038615575462551684\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8422018348623853,\n \"acc_stderr\": 0.015630022970092434,\n \"acc_norm\": 0.8422018348623853,\n \"acc_norm_stderr\": 0.015630022970092434\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5231481481481481,\n \"acc_stderr\": 0.03406315360711507,\n \"acc_norm\": 0.5231481481481481,\n \"acc_norm_stderr\": 0.03406315360711507\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8480392156862745,\n \"acc_stderr\": 0.025195658428931792,\n \"acc_norm\": 0.8480392156862745,\n \"acc_norm_stderr\": 0.025195658428931792\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7890295358649789,\n \"acc_stderr\": 0.026558372502661916,\n \"acc_norm\": 0.7890295358649789,\n \"acc_norm_stderr\": 0.026558372502661916\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7709923664122137,\n \"acc_stderr\": 0.036853466317118506,\n \"acc_norm\": 0.7709923664122137,\n \"acc_norm_stderr\": 0.036853466317118506\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228732,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228732\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.0401910747255735,\n 
\"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.0401910747255735\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742178,\n \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742178\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04697113923010212,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04697113923010212\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.020588491316092368,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.020588491316092368\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8237547892720306,\n \"acc_stderr\": 0.013625556907993459,\n \"acc_norm\": 0.8237547892720306,\n \"acc_norm_stderr\": 0.013625556907993459\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7369942196531792,\n \"acc_stderr\": 0.02370309952525817,\n \"acc_norm\": 0.7369942196531792,\n \"acc_norm_stderr\": 0.02370309952525817\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4480446927374302,\n \"acc_stderr\": 0.016631976628930595,\n \"acc_norm\": 0.4480446927374302,\n \"acc_norm_stderr\": 0.016631976628930595\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7189542483660131,\n \"acc_stderr\": 0.025738854797818733,\n \"acc_norm\": 0.7189542483660131,\n \"acc_norm_stderr\": 0.025738854797818733\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n \"acc_stderr\": 0.025755865922632945,\n 
\"acc_norm\": 0.7106109324758842,\n \"acc_norm_stderr\": 0.025755865922632945\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600712995,\n \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600712995\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.49645390070921985,\n \"acc_stderr\": 0.02982674915328092,\n \"acc_norm\": 0.49645390070921985,\n \"acc_norm_stderr\": 0.02982674915328092\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46936114732724904,\n \"acc_stderr\": 0.012746237711716634,\n \"acc_norm\": 0.46936114732724904,\n \"acc_norm_stderr\": 0.012746237711716634\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.02841820861940676,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.02841820861940676\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6633986928104575,\n \"acc_stderr\": 0.019117213911495148,\n \"acc_norm\": 0.6633986928104575,\n \"acc_norm_stderr\": 0.019117213911495148\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.746938775510204,\n \"acc_stderr\": 0.027833023871399673,\n \"acc_norm\": 0.746938775510204,\n \"acc_norm_stderr\": 0.027833023871399673\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8308457711442786,\n \"acc_stderr\": 0.026508590656233268,\n \"acc_norm\": 0.8308457711442786,\n \"acc_norm_stderr\": 0.026508590656233268\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.03588702812826371,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.03588702812826371\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n 
\"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5850673194614443,\n \"mc1_stderr\": 0.017248314465805978,\n \"mc2\": 0.7044948614297283,\n \"mc2_stderr\": 0.015034716941280082\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8926598263614838,\n \"acc_stderr\": 0.008699760340699001\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.689158453373768,\n \"acc_stderr\": 0.012748860507777723\n }\n}\n```", "repo_url": "https://huggingface.co/zhengr/MixTAO-7Bx2-MoE-Instruct-v6.0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|arc:challenge|25_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|gsm8k|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hellaswag|10_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T07-43-54.051731.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T07-43-54.051731.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T07-43-54.051731.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T07-43-54.051731.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T07-43-54.051731.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T07-43-54.051731.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T07-43-54.051731.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": 
"2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": 
"2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T07-43-54.051731.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["**/details_harness|winogrande|5_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T07-43-54.051731.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T07_43_54.051731", "path": ["results_2024-01-25T07-43-54.051731.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T07-43-54.051731.parquet"]}]}]}
2024-01-25T07:46:30+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of zhengr/MixTAO-7Bx2-MoE-Instruct-v6.0 Dataset automatically created during the evaluation run of model zhengr/MixTAO-7Bx2-MoE-Instruct-v6.0 on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T07:43:54.051731(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of zhengr/MixTAO-7Bx2-MoE-Instruct-v6.0\n\n\n\nDataset automatically created during the evaluation run of model zhengr/MixTAO-7Bx2-MoE-Instruct-v6.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T07:43:54.051731(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of zhengr/MixTAO-7Bx2-MoE-Instruct-v6.0\n\n\n\nDataset automatically created during the evaluation run of model zhengr/MixTAO-7Bx2-MoE-Instruct-v6.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T07:43:54.051731(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 205, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of zhengr/MixTAO-7Bx2-MoE-Instruct-v6.0\n\n\n\nDataset automatically created during the evaluation run of model zhengr/MixTAO-7Bx2-MoE-Instruct-v6.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T07:43:54.051731(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]" ]
[ -0.08561321347951889, 0.1464323103427887, -0.004908473696559668, 0.024717053398489952, 0.05861730873584747, -0.03862692043185234, -0.01134990993887186, 0.11877989768981934, 0.0007460594060830772, 0.1750444769859314, -0.028328005224466324, 0.08246802538633347, 0.08963996171951294, 0.19410696625709534, 0.011433466337621212, -0.148602694272995, 0.02458592690527439, -0.07191400229930878, 0.07643260806798935, 0.07361473143100739, 0.09234075248241425, -0.10383914411067963, 0.06634165346622467, -0.0013840894680470228, -0.023361222818493843, -0.0026180632412433624, -0.08787468820810318, -0.04133022949099541, 0.08781246840953827, 0.0785846933722496, 0.008257195353507996, -0.01433695387095213, -0.004982346203178167, -0.2376057505607605, 0.023601246997714043, 0.0771043598651886, 0.026045721024274826, 0.07596692442893982, 0.13533058762550354, -0.08718746900558472, 0.11616943776607513, -0.08559214323759079, 0.029179871082305908, 0.05318976566195488, -0.10864141583442688, -0.09483728557825089, -0.18443775177001953, 0.026687391102313995, 0.07079062610864639, 0.05062325298786163, -0.023719020187854767, 0.11785168200731277, 0.0055040279403328896, 0.030311372131109238, 0.18626141548156738, -0.1511387974023819, -0.025250550359487534, -0.014832287095487118, 0.05984612926840782, 0.03625130280852318, -0.12654724717140198, -0.018343545496463776, 0.024653444066643715, 0.030771663412451744, -0.010742062702775002, 0.018532181158661842, 0.0310796108096838, 0.017117030918598175, -0.142412006855011, -0.07145857065916061, 0.12724582850933075, 0.014915743842720985, -0.06408476829528809, -0.13374590873718262, -0.035306166857481, -0.07889395952224731, -0.01871860958635807, -0.006349377799779177, 0.006335760001093149, -0.018630411475896835, 0.06812793761491776, -0.029068801552057266, -0.09379875659942627, -0.012057189829647541, -0.03299607336521149, 0.08103503286838531, 0.018844466656446457, 0.003152597928419709, 0.0012277665082365274, 0.11292437463998795, 0.06200680509209633, 
-0.09317082911729813, -0.13073092699050903, -0.05390617623925209, -0.11125756055116653, -0.04724257066845894, 0.014640549197793007, -0.06796060502529144, 0.03577888384461403, 0.23645883798599243, -0.06516192853450775, 0.0202905535697937, -0.08792690187692642, -0.0006143524660728872, 0.10591238737106323, 0.07003992795944214, -0.006815800908952951, -0.07812286913394928, -0.0005596207920461893, 0.034319743514060974, 0.030659982934594154, -0.018361771479249, 0.04039212316274643, 0.053223516792058945, 0.05319132283329964, 0.12348166108131409, 0.10700152069330215, 0.0127210458740592, -0.051562417298555374, -0.02558780275285244, 0.1545928716659546, -0.18238617479801178, -0.001931103877723217, 0.01216200552880764, -0.06665562093257904, -0.09678096324205399, 0.053648415952920914, -0.007634259294718504, -0.06417404860258102, 0.0864683985710144, -0.06980329751968384, -0.052993208169937134, -0.0831080824136734, -0.04299008846282959, 0.05085303261876106, -0.010921761393547058, -0.05263323709368706, -0.06145089119672775, -0.11242590844631195, -0.09154176712036133, -0.008493549190461636, -0.07480039447546005, 0.0031571078579872847, 0.034633561968803406, -0.008705989457666874, -0.01883183792233467, -0.020213957875967026, 0.14022397994995117, -0.08299330621957779, 0.02946034073829651, -0.005758218467235565, -0.011219162493944168, 0.07535519450902939, 0.04289829358458519, -0.12997333705425262, 0.07735221087932587, -0.03433043137192726, 0.10244932025671005, -0.09005670994520187, -0.004833789076656103, -0.1338014453649521, -0.009229376912117004, -0.05334850400686264, 0.008007790893316269, 0.012419233098626137, 0.10291004180908203, -0.22990719974040985, 0.003439993131905794, 0.11802839487791061, -0.11258786916732788, -0.09626127034425735, 0.031402040272951126, -0.02185160480439663, 0.04468575492501259, 0.059966493397951126, 0.06093057617545128, 0.10828274488449097, -0.07071228325366974, -0.1348823755979538, -0.11091826856136322, -0.0058411299251019955, 0.12425248324871063, 
0.05798106640577316, -0.05613928288221359, 0.14656232297420502, 0.03008013777434826, 0.010584651492536068, -0.04456113278865814, -0.029942620545625687, -0.04986948519945145, -0.010099013336002827, -0.04777616634964943, -0.0977281928062439, -0.0019124966347590089, -0.05436522886157036, -0.034529417753219604, -0.0749044269323349, 0.0453033521771431, 0.10309014469385147, 0.01514348853379488, 0.017831068485975266, -0.10187198966741562, 0.07641589641571045, 0.015133719891309738, 0.012194638140499592, -0.2320411652326584, -0.10666906088590622, 0.03266581892967224, -0.11060281842947006, 0.02232922799885273, 0.01694171316921711, 0.008273170329630375, 0.04999520257115364, 0.01741618849337101, 0.012128526344895363, -0.004640515428036451, -0.0026076692156493664, -0.022554028779268265, -0.13065700232982635, -0.06098739430308342, -0.0741761326789856, 0.0697571188211441, -0.1592235267162323, -0.033306416124105453, 0.12651079893112183, 0.19133438169956207, 0.026416899636387825, -0.10341469943523407, 0.06564211845397949, -0.0028729811310768127, -0.05294940993189812, -0.0633942037820816, 0.005434660241007805, -0.006412455812096596, 0.06651829183101654, 0.062477581202983856, -0.1794324517250061, -0.11271985620260239, 0.06391483545303345, 0.15136462450027466, -0.038842372596263885, -0.07234621047973633, -0.12020163983106613, -0.04302822798490524, -0.0829782485961914, -0.0539030022919178, 0.06468904763460159, 0.07066860049962997, 0.0347098633646965, -0.0603475421667099, -0.09744738787412643, -0.0031391370575875044, 0.07491444796323776, -0.07082109153270721, 0.10072597861289978, 0.044866375625133514, -0.14915689826011658, 0.09601762145757675, -0.0014012216124683619, 0.1491478681564331, 0.12642738223075867, -0.011095663532614708, -0.12176442891359329, 0.0037225820124149323, 0.04952411726117134, 0.04379715770483017, 0.0862693265080452, 0.0039042066782712936, 0.014240899123251438, 0.07301371544599533, -0.007895972579717636, 0.05862346664071083, -0.055154427886009216, 0.048381004482507706, 
0.026434650644659996, -0.006123733706772327, 0.06805406510829926, 0.04110346734523773, 0.015824539586901665, 0.07593327760696411, 0.03630819544196129, 0.1098635271191597, -0.03317992016673088, -0.046316150575876236, -0.10177654772996902, 0.13045264780521393, -0.0831037387251854, -0.22685249149799347, -0.1544933170080185, -0.06382962316274643, -0.026386456564068794, -0.014278627932071686, 0.04621926322579384, 0.022742100059986115, -0.08800073713064194, -0.12167074531316757, 0.049644242972135544, 0.043449562042951584, -0.07908127456903458, -0.022109385579824448, 0.023818667978048325, 0.01504245214164257, -0.17158611118793488, 0.03349937126040459, 0.01288588810712099, -0.038606058806180954, -0.034844767302274704, 0.09250263124704361, 0.1282636970281601, 0.045648228377103806, 0.0405556783080101, -0.015225720591843128, -0.0066513558849692345, 0.19089457392692566, -0.08882952481508255, 0.028709903359413147, 0.1334034651517868, -0.04502076283097267, 0.0544787161052227, 0.1080516129732132, -0.00849081389605999, -0.08562469482421875, 0.03153316304087639, 0.09722979366779327, -0.05611346289515495, -0.2580941617488861, -0.0761633813381195, -0.04868346080183983, -0.02842768095433712, 0.09686712175607681, 0.10397275537252426, -0.0021663703955709934, -0.007114110980182886, -0.13169434666633606, -0.05536448583006859, -0.05034724995493889, 0.05652766302227974, 0.03585395589470863, 0.02420055866241455, 0.04250873625278473, -0.05120605602860451, 0.05339919030666351, 0.1178814247250557, 0.0017297975718975067, 0.15563930571079254, -0.058391496539115906, 0.15889298915863037, 0.09398436546325684, 0.08746561408042908, -0.007176591549068689, 0.07484854012727737, -0.013248980976641178, 0.057878319174051285, 0.011931066401302814, -0.09832615405321121, -0.05377526581287384, 0.08294440805912018, 0.0034544793888926506, -0.007242236286401749, 0.04344355687499046, -0.04667625203728676, 0.06694736331701279, 0.19321581721305847, 0.0017874486511573195, -0.15233612060546875, -0.0962294414639473, 
0.0392165407538414, -0.03818940743803978, -0.09142960608005524, -0.030800633132457733, 0.059921007603406906, -0.12316489964723587, 0.04158850014209747, -0.04407849535346031, 0.08352167159318924, -0.10715912282466888, -0.024441082030534744, 0.005529636982828379, 0.08452317863702774, -0.017036404460668564, 0.09075113385915756, -0.12261976301670074, 0.07860344648361206, -0.006142022088170052, 0.06507304310798645, -0.07637562602758408, 0.05897577852010727, 0.0043057650327682495, -0.06732843071222305, 0.11316333711147308, 0.0048211426474153996, -0.11682429909706116, -0.03257143497467041, -0.12306179106235504, 0.002745342208072543, 0.05316162109375, -0.1147015392780304, 0.11026011407375336, 0.005013698246330023, -0.020400673151016235, -0.042182937264442444, -0.03328346088528633, -0.1192956268787384, -0.20344184339046478, 0.12519524991512299, -0.13351093232631683, 0.07662200927734375, -0.07506886124610901, -0.05694307014346123, -0.011287938803434372, 0.19104355573654175, -0.11642568558454514, -0.05832108482718468, -0.1482403725385666, 0.09246601164340973, 0.16928519308567047, -0.06250545382499695, 0.06886844336986542, -0.029169240966439247, 0.2169881910085678, -0.003435227321460843, -0.06893522292375565, -0.010870499536395073, -0.07694111764431, -0.18938744068145752, -0.03196509927511215, 0.15563078224658966, 0.06057644635438919, -0.0045394147746264935, 0.004199187736958265, 0.05521615222096443, 0.012096635065972805, -0.08735720068216324, 0.05348331108689308, 0.0758601501584053, 0.09366010874509811, -0.002478131093084812, -0.0450105182826519, -0.11794518679380417, -0.10461900383234024, -0.09805139154195786, 0.07442699372768402, 0.1442325860261917, -0.03495335578918457, 0.13083241879940033, 0.07845531404018402, -0.09596161544322968, -0.1720878928899765, -0.0453781820833683, 0.02734915167093277, 0.006003544200211763, 0.0779251679778099, -0.1675136685371399, 0.07298686355352402, 0.09009917825460434, -0.024024760350584984, 0.17201676964759827, -0.23137468099594116, 
-0.140910804271698, 0.020217150449752808, 0.033991504460573196, -0.1715778261423111, -0.14183996617794037, -0.1275012344121933, -0.008450017310678959, -0.13288091123104095, 0.14007635414600372, 0.011700431816279888, 0.034060146659612656, -0.01913163810968399, 0.018589435145258904, 0.04206802695989609, -0.048042185604572296, 0.12213993072509766, -0.01807229407131672, 0.011386280879378319, -0.09473200142383575, -0.028651069849729538, -0.013034176081418991, -0.06216292455792427, 0.051184333860874176, 0.03203524276614189, 0.06386697292327881, -0.1065313071012497, -0.027638722211122513, -0.045550428330898285, 0.08057675510644913, -0.05526110902428627, -0.036171406507492065, -0.07205493003129959, 0.08532232791185379, 0.06306100636720657, -0.008603586815297604, 0.08055221289396286, -0.040635645389556885, 0.05158349499106407, 0.17340058088302612, 0.04942266643047333, 0.023487873375415802, -0.07753924280405045, -0.0377454049885273, 0.012893919833004475, -0.010711553506553173, -0.09313816577196121, 0.04100370407104492, 0.11029628664255142, 0.05248406156897545, 0.07740553468465805, -0.014453074894845486, -0.2119193822145462, 0.011965184472501278, 0.10253218561410904, -0.12284354120492935, -0.15590372681617737, 0.007417414803057909, 0.14775240421295166, -0.11210082471370697, -0.04268597066402435, 0.10135531425476074, 0.026131892576813698, -0.03477752208709717, 0.005175210069864988, 0.06827418506145477, 0.04929574579000473, 0.10666318237781525, -0.01451937947422266, 0.055093731731176376, -0.06885162740945816, 0.09977894276380539, 0.11703582108020782, -0.06249012053012848, -0.00044232289656065404, 0.12033608555793762, -0.06342924386262894, -0.04427807033061981, -0.007579974364489317, 0.03004300408065319, -0.0026688699144870043, -0.03184673190116882, -0.006825796328485012, -0.050189632922410965, 0.08264946937561035, 0.20191211998462677, -0.010271836072206497, 0.05974419787526131, 0.0320083424448967, -0.005101210903376341, -0.03730452060699463, 0.10817377269268036, 
0.02914969064295292, 0.04588792100548744, -0.02248937264084816, 0.028292933478951454, 0.014936188235878944, -0.00409052986651659, 0.008951017633080482, -0.04513987526297569, -0.02624976634979248, -0.02462812140583992, -0.16625083982944489, 0.01695355586707592, -0.0853935033082962, -0.03629130870103836, -0.023069962859153748, -0.051231738179922104, -0.03594910353422165, 0.0412602461874485, -0.06101773679256439, -0.07094269245862961, -0.07572725415229797, 0.09393573552370071, -0.19877830147743225, 0.013420198112726212, 0.07908692210912704, -0.08548875898122787, 0.0867118090391159, 0.03231445327401161, 0.008296488784253597, 0.02217864617705345, -0.10248570144176483, -0.03491957113146782, -0.015895556658506393, 0.035937219858169556, 0.04223299026489258, -0.1528349369764328, 0.0019237914821133018, 0.01964667998254299, -0.07352861016988754, -0.03098505176603794, 0.0368620939552784, -0.15278953313827515, -0.003715676022693515, 0.050598468631505966, -0.0046725706197321415, -0.026634907349944115, 0.03142022341489792, 0.06584629416465759, 0.02460910752415657, 0.09425235539674759, -0.0009959471644833684, 0.026853296905755997, -0.16760441660881042, -0.024144072085618973, -0.005851729307323694, -0.025099940598011017, 0.04373469576239586, 0.030908407643437386, 0.03772491216659546, -0.015046323649585247, 0.1801101565361023, -0.03750498592853546, 0.028487663716077805, 0.06476040184497833, -0.02095642499625683, -0.088764488697052, 0.04587077349424362, 0.08154167234897614, 0.028606455773115158, 0.028869753703475, 0.03635457903146744, -0.050546884536743164, -0.030122077092528343, -0.02559167891740799, 0.11336245387792587, 0.15476998686790466, 0.1755884289741516, 0.0006945916684344411, 0.0630100667476654, -0.17571815848350525, -0.05075526610016823, 0.007903268560767174, -0.055505745112895966, 0.034378111362457275, -0.05936501920223236, 0.07187412679195404, 0.10710018873214722, -0.12408685684204102, 0.08385858684778214, -0.0593651719391346, -0.02317347563803196, -0.0170250553637743, 
-0.14153850078582764, -0.035265643149614334, 0.03149552270770073, 0.004222739953547716, -0.08907553553581238, 0.10621354728937149, 0.09935487061738968, 0.00302224257029593, 0.0010638268431648612, 0.09614215791225433, -0.1086619645357132, -0.08548036962747574, -0.009703375399112701, 0.008905638940632343, 0.021539848297834396, 0.025753449648618698, 0.04438304528594017, 0.01772051490843296, 0.071929432451725, 0.08547412604093552, 0.07658429443836212, 0.06712684035301208, 0.04514825716614723, -0.018009351566433907, -0.05706190690398216, 0.022468630224466324, -0.022667713463306427, -0.03253979608416557, 0.1536034643650055, 0.05143861100077629, 0.04040750861167908, 0.01478685811161995, 0.22492913901805878, 0.029185235500335693, -0.048372626304626465, -0.12104246020317078, 0.07336615771055222, 0.05103989318013191, 0.004730424843728542, 0.028500812128186226, -0.14556673169136047, 0.03703932836651802, 0.16433727741241455, 0.0647207722067833, 0.046014610677957535, 0.010403510183095932, 0.03623241186141968, 0.027048049494624138, -0.016992054879665375, 0.009789408184587955, 0.05736029893159866, 0.17767491936683655, -0.030828673392534256, 0.05255354195833206, -0.02052813395857811, -0.027117159217596054, -0.0012151916744187474, 0.13165034353733063, -0.05389814451336861, 0.03235271945595741, -0.06503107398748398, 0.07666651904582977, -0.0683118924498558, -0.3234871029853821, 0.012753542512655258, -0.09777004271745682, -0.15756943821907043, -0.00710676796734333, 0.033355895429849625, -0.03133454546332359, 0.033399224281311035, 0.05200802907347679, -0.03386710211634636, 0.12946321070194244, 0.01676591858267784, -0.0649040937423706, -0.07826026529073715, 0.09766393899917603, -0.04590502753853798, 0.25494831800460815, -0.02105404995381832, 0.006889154203236103, 0.09236428886651993, -0.003476538695394993, -0.1930730640888214, 0.018045006319880486, 0.07000385224819183, -0.07588440179824829, 0.04486057534813881, 0.183098703622818, -0.012297788634896278, 0.11378619819879532, 
0.04874356836080551, 0.005368590820580721, 0.03983602300286293, 0.0393948070704937, 0.01583511009812355, -0.06775284558534622, 0.0539977066218853, -0.07710793614387512, 0.14307670295238495, 0.11623192578554153, -0.03744266927242279, 0.013712756335735321, -0.07343238592147827, 0.0742114782333374, -0.034760285168886185, 0.11419263482093811, -0.001762148691341281, -0.1895284801721573, 0.056666795164346695, 0.04813378304243088, 0.04640116170048714, -0.1820763349533081, -0.0521487332880497, 0.088156558573246, -0.05524760112166405, -0.007977391593158245, 0.12516434490680695, 0.039253778755664825, 0.009287437424063683, -0.05929124355316162, -0.06672608107328415, 0.011583831161260605, 0.09252358227968216, -0.09808280318975449, -0.03661150112748146 ]
06001af1ff4c261f316c3927f5c3165ebd86cdc6
# Dataset Card for websight-5K-multimodal This dataset has been created with [Argilla](https://docs.argilla.io). It is a subset of 5000 records from the [Websight](https://huggingface.co/datasets/HuggingFaceM4/WebSight?row=0) collection, which is used for HTML/CSS code generation from an input image. Below you can see a screenshot of the UI from where annotators can work comfortably. ![image/png](https://cdn-uploads.huggingface.co/production/uploads/6420817bf837b31c1cfced50/TzdrHEzgMaFx0C0YyUH-l.png) As shown in the sections below, this dataset can be loaded into Argilla as explained in [Load with Argilla](#load-with-argilla), or used directly with the `datasets` library in [Load with `datasets`](#load-with-datasets). ## Dataset Description - **Homepage:** https://argilla.io - **Repository:** https://github.com/argilla-io/argilla - **Paper:** - **Leaderboard:** - **Point of Contact:** ### Dataset Summary This dataset contains: * A dataset configuration file conforming to the Argilla dataset format named `argilla.yaml`. This configuration file will be used to configure the dataset when using the `FeedbackDataset.from_huggingface` method in Argilla. * Dataset records in a format compatible with HuggingFace `datasets`. These records will be loaded automatically when using `FeedbackDataset.from_huggingface` and can be loaded independently using the `datasets` library via `load_dataset`. * The [annotation guidelines](#annotation-guidelines) that have been used for building and curating the dataset, if they've been defined in Argilla. 
### Load with Argilla To load with Argilla, you'll just need to install Argilla as `pip install argilla --upgrade` and then use the following code: ```python import argilla as rg ds = rg.FeedbackDataset.from_huggingface("argilla/websight-5K-multimodal") ``` ### Load with `datasets` To load this dataset with `datasets`, you'll just need to install `datasets` as `pip install datasets --upgrade` and then use the following code: ```python from datasets import load_dataset ds = load_dataset("argilla/websight-5K-multimodal") ``` ### Supported Tasks and Leaderboards This dataset can contain [multiple fields, questions and responses](https://docs.argilla.io/en/latest/conceptual_guides/data_model.html#feedback-dataset) so it can be used for different NLP tasks, depending on the configuration. The dataset structure is described in the [Dataset Structure section](#dataset-structure). There are no leaderboards associated with this dataset. ### Languages [More Information Needed] ## Dataset Structure ### Data in Argilla The dataset is created in Argilla with: **fields**, **questions**, **suggestions**, **metadata**, **vectors**, and **guidelines**. The **fields** are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions. | Field Name | Title | Type | Required | Markdown | | ---------- | ----- | ---- | -------- | -------- | | image | Image | text | True | True | | html_code | Html_code | text | True | True | The **questions** are the questions that will be asked to the annotators. They can be of different types, such as rating, text, label_selection, multi_label_selection, or ranking. | Question Name | Title | Type | Required | Description | Values/Labels | | ------------- | ----- | ---- | -------- | ----------- | ------------- | | accuracy | Assess if the generated code accurately reflects the layout, design elements, and style shown in the image. 
| rating | True | N/A | [1, 2, 3, 4, 5, 6, 7] | | quality | Assess the generated code for cleanliness, efficiency, and proper use of HTML/CSS practices. | multi_label_selection | True | N/A | ['clean code', 'efficient', 'proper tags and classes'] | | correction | Identify any errors or issues in the generated HTML/CSS code and suggest possible corrections. | text | True | N/A | N/A | The **suggestions** are human or machine generated recommendations for each question to assist the annotator during the annotation process, so those are always linked to the existing questions, and named appending "-suggestion" and "-suggestion-metadata" to those, containing the value/s of the suggestion and its metadata, respectively. So on, the possible values are the same as in the table above, but the column name is appended with "-suggestion" and the metadata is appended with "-suggestion-metadata". The **metadata** is a dictionary that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the `metadata_properties` defined in the dataset configuration file in `argilla.yaml`. | Metadata Name | Title | Type | Values | Visible for Annotators | | ------------- | ----- | ---- | ------ | ---------------------- | The **guidelines**, are optional as well, and are just a plain string that can be used to provide instructions to the annotators. Find those in the [annotation guidelines](#annotation-guidelines) section. 
### Data Instances An example of a dataset instance in Argilla looks as follows: ```json { "external_id": null, "fields": { "html_code": "```json\n\u003chtml\u003e\n\u003cstyle\u003e\nbody {\n font-family: Arial, sans-serif;\n margin: 0;\n padding: 0;\n border-box;\n}\n\nheader {\n background: #ff0;\n text-align: center;\n padding: 20px;\n}\n\n#video-container {\n width: 70%;\n margin: auto;\n text-align: center;\n}\n\n#video-player {\n width: 100%;\n height: 300px;\n background: #f0f;\n}\n\n#chef-tips {\n margin: 40px;\n}\n\n#chef-tips p {\n color: #000;\n line-height: 1.6;\n}\n\nfooter {\n background: #ff0;\n text-align: center;\n padding: 20px;\n position: fixed;\n width: 100%;\n bottom: 0;\n}\n\u003c/style\u003e\n\u003cbody\u003e\n \u003cheader\u003e\n \u003ch1\u003eInter\u0027national\u003c/h1\u003e\n \u003c/header\u003e\n \u003cmain\u003e\n \u003csection id=\"video-container\"\u003e\n \u003cvideo id=\"video-player\" src=\"#\" controls\u003e\u003c/video\u003e\n \u003c/section\u003e\n \u003csection id=\"chef-tips\"\u003e\n \u003ch2\u003eChef\u0027s Tips\u003c/h2\u003e\n \u003cp\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\u003c/p\u003e\n \u003c/section\u003e\n \u003c/main\u003e\n \u003cfooter\u003e\n \u003cp\u003e\u00a9 2022 Inter\u0027national. 
All rights reserved.\u003c/p\u003e\n \u003c/footer\u003e\n \u003c/body\u003e\n\u003c/html\u003e\n```", "image": "\u003cimg src=\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAABQAAAALQCAYAAADPfd1WAAB/xElEQVR4nOzdeZxWdd0//vc1DCDgIIsOLlgJBoqJicKN4ZalpoXeZlpf03JJU2/3cskyl8rd8q4009JSbzUx09xSzCUjd1NTFPcCFZBlYAaGZWbO74/zu66ZaxbmAmYYOPN8Ph7X48xZrnM+51xnua7XfM755JIkkgAAAAAAMqmsqwsAAAAAAHQeASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGlXd1AQCA7uPVVyM++qh42NixEf36dU15utqiRRHPPls8rF+/dJtkxVtvRcyY0dify0VMmBBR3s2+hT73XERNTWN/RUXEDjt0XXkAgO4llySRdHUhAIDu4QtfiHjwweJhTz4ZMX78mln+lCkRf/xjxE9/umaW156nnorYaafiYZWVEbNmdU15VkVDQ8TNN0fU1kZ8+9stxx9zTMR11xUPmzEjYrPN1kz51habb14chA4bFvH2211XHgCge3ELMACQedOnR3z96xE779wygGTVPfVUGt5+85sR06Z1dWkAAGhLN7v5AgDobi69NOLMM7u6FNlSXR1x4okRv/99V5cEAIBSqAEIAGTaj3/c1SXInunThX8AAOsSNQABALrIkCEth33842u+HJ3p4osjzjijsT+Xi9h0064rDwBAdyQABADWKfPnRzz/fNqK6qc+1bktCH/4YdpycZ8+EaNGRQwcuOrzWr48bfG3tjZixIi0UYhPfCJdj+rqxulGj165+dbWRvzznxFLl0Zsu23Ehhuu3Hs//DBi5syIefPSbTloUMQmm6SNkXSEQYPS18qaPz/iP/+J+OCDiA02iNh669Xb/nn19RGvvZbWYtx66zRwzeVKf//s2en2mjkzndfAgelr2LCInj1Xv3wAAJ3BLcAAwFpl+PA0kMm/LrssIknS7jbbpGHSnnumjU+sv37EuHFpANbckCHp+5uGaxERU6c2znvnnVu+b9GiiFNPTd+/6abpsnbeOV3u5pun4xYtar3sCxcWlz2Xi7j77oirr47o1StiwoSIz38+4mMfS1tErq9v2QLyNtsU9x93XPH8vvSldPh996Xl6ts3ne8ee0RstFFaxttuW/E2njYt4vjj0/cOH56+f+LEdB6f/nS67jvs0HI+r7+elqF5GSMifvazxjKec07j8FNPbblN2mrlOEnSVpq/8IV0e3/60xH77puWL7/9v/WtiI8+anvd/u//ipfVv39EXV26zvvtl4Z1226bzneLLdJw8fvfTwPattTVRfzudxHbbZdum+22i9h773QeO+0UsdVWEYMHp2X78MO25wMA0FUEgADAWqWmprh/3ryIb3wjvY106tSW0z/7bMSYMRGTJhUPr61tf1nz5xf3//Of6byuvDKt6
dXcjBnpuDFjIl56qeX4+vqWw37724j/+Z+Wwz/6KKK8PA3amtp66+L+5mHjwoURv/hFGgROmdJ6Gf/f/4s47bSW4yLS2pNbbRXxq1+1Pj7vhRfS+Zx6auOwZctW/J7WytxaWFpX13LY/PkRBx4Y8ZWvtN1S84wZ6fYcPjzizjtbn2bp0uL+6uqIxx6LGDs24p57WgbC1dURF14Yseuure8z9fURRx4ZccQRES+/3Poy8/P57W8jRo6MeOuttqcDAOgKAkAAYK128cURN9/c/nQnnFB6QNWaWbMidtst4o032p/2jTfS2mkzZrQ/7T33tD78iCPS7imnRDz9dONr111XPL8nnog46aT2l/uzn7UMTBcujPjsZ9t/b1NXXpmGgZ1p2bK0Rt2f/lTa9NXVaVj461+XNv2ee7YM/pp76qnW97PLL4+46abSlpMv23e/W/r0AABrggAQAFhnfO1rEeeem9bAa2727OKaY5tv3vZz7Cor01fTBjfOOadlSFRREXH22WkNsdaezXfJJSu/Dnlf/nLaHTIkvY05/+rbt/R57LFHxPnnR3zxi62Pv/764v6bbmq5jpWV6e3Av/xlxJlnpuvc3OTJabdPn3T61qbJz6uyMr0ddmWcd15ak7O5iop03XbZpfX3HXts+pzAUo0enX6eRx7Z+vhf/rK4f8mSiJ/8pOV0e+8dcdFFET/9aRouNnf33asXRgMAdDSNgAAAa72KirThhs02S/vPPDN9dl7zWzKnTUufZReRNt4RkT4DrmnoNWpU47i8f/4z4rrriocNG5beYrvxxmn/aadFHHRQcY2+X/4y4jvfSRvzWJHKyojf/Ca93ffhhyPefXf1W8J94IH0WXl5xx0Xcc01xdM0rwH49NMt5/O3v6W3reZttVVj7cS8999Pu5/8ZFpTcurUls8BPPXUNBBbWXPmpGFac4cfHnHVVY2B6LRpaWjafJ2OPTbi/vvbX87ZZxeHeXvumd7i3NTLL0c0NESUlTX29+lTvP8ceWR6q2/eySenjaY0v2V83rzGfQcAoKupAQgArPVuvrkx/ItIQ5nTT2853cKFqzb/Rx5pOezCC4sDnN69I37+85bT3Xpr+/P/3/9Ng8lNN02fZ3juuatWzrzTTisO/yLSULS55s84vPHGtLGL119PG9t47LHi8C8iYvfdW86nqmo1CtuO1hosGTs2Ddma1oYcOTKtWdfcAw+0/rzG5vM7//ziYV/7WsTQoS2nXbKk8e9x49LAc+7c9Nbrm25KbwluqqwsYv/9W86nvVuOAQDWJDUAAYC1Xmu3WQ4b1nJY8wYgStXac/+GD09rpzW1/vppbcSm4c706e3Pf7/9Vq1cbWntlt+PfazlsNZuQy0vT8O0fPC3YEH6jL/nnot4/PG0deHmmoZiHe2xx1oOO//8xlp4TW25ZcRRRxXXwItIa4e2dbt3RLq9ylv51rv11i2f47h0acvbsAcNSltc3nnntIbgtGnp9nrmmYh77414552W8y6lERoAgDVFAAgArNUqK9Maf821NmxVvfZay2Fjx5b23vYaAhk6dOWe61eKzTdvOay1wKwt06alNe/uvHPFLdvm5XKlz3tlvflmy2Hbbtv29Ntv33LYK6+kDbi0pbVwNKL0z6W2Ng1Gb7st4qGHSqvd16NHafMGAFgTBIAAwFpt/fVbH96rV8ctY9q0VX/vv/+94vFbbLHq825LW+Fn89qJrbnhhrYbwWhLa7XnOkprtQuHDGl7+taeq9deQyBtba/evVf8voiIjz5KaxC21kjJiqxMIAsA0Nl8NQEA1mrrrdf68I6slbY6jTXU1Kx4fEfX/osoLbhqzdVXtx3+7blnxAUXtH5LbmeGWYMGtRy2omf6zZzZclh7IWtb+1B76zV/ftrYTGvh39ChEUcfnQaqxx238vMGAFiT1AAEALq9kSNb3go7a9aKnytXqrbCp9WxquHn//5vy2HXXRdxyCGNQWV9fcctrxSbbx7x1FPFw
159tbjRl6Zau2W5eYvEza1q+R9+uOXz/XbfPW2deNSoxmEffNDyvQJAAGBt4qsJANCt1NW1HNZagyJ/+1vLYe+9F/Hd76YB0F//GvH++xFJsuLldeStyqvjP/9p2djJMcdEfOtbxbUU3323/Xm1Fm4tX75q5Ro3ruWwc89tfbu++27Etde2HL711qu27PY8/HDLYTffXBz+RbT+HEMAgLWJABAAyLTmz3/78MPG1oLzreR+6Ust33fqqenz35r67ncjrrgi4oQTIj7/+fQ20BNOWPHy15bGIFp7VmHzmncNDREnndRyuuatK7f2TMCmrSG31vpwW7761dbLdcIJxc8HfOediK98peW0++8fseGGpS9vZbz1VsthL7xQ3P+Pf0T87nctp1uZbQAA0NkEgABApg0YUNxfXR0xenTETjs11hzbeeeIXXYpnm7GjIhPfSoN/G64IeLLX4744x9bzv9//qdTit3hRo5sOezllyOOOiri0Ucjfvvb9PbWBx5oOd38+cX9/fq1nObuu9PtOHJk+6FoU5tvHnHYYS2HX311xMc/HnHwwRFf+ELE8OEtw7eKirQ2ZmcZPbrlsOOPT2shPvpoxGmnpWVrTVVV55ULAGBleQYgAJBpn/50y1tfm/bX16e19C6+OGLChOLpZs9Oa/215fjjW94OuraqrIwYM6ZliHb99elrRV56Ka0dmL/1d6ONWm9xeMqUtLuyLR//8pfpe5s/b2/27IhJk9p+3//+b9vPCuwIe+0VceWVxcNmzIj49rfbf++0aS33JwCArqIGIACQaV//+orH52/z/cxn0qBr6NDS5nvYYa03qrE2u+mm9qepqGg9CG16u3B5edoCbltKeY5gU/37Rzz+eNoScSkqKiL+9KeII45YueWsrH32iTj88Pan23//lsNuu63DiwMAsMoEgADAGtPas+Pae0ZeW63otva+1oZNnJjeTtqaESOKnzM3enTEiy+moU9rDYNEpLXobrstvS24+fq0tvyePVufT6lKXc+Ils87bD7dqFFpC7t77NH6+7/2tfS5d60Fm3/5S3H/j34Uceyxrc+naYha6mc+dGi6jJtuSm9Fbk1FRfrZvP12xH//d+vTtLa81oZFtP7ZNC/btddG/Oxnrb9/6NCI22+PuOuudD9ravLkiJqaxv7mjcG0VSYAgM6QS5Jop+06AIB1X21txOuvp7eZfvzjaRjWtPXb1lRVRUydmjYcstlm6fPqNt00IpdbI0XuVB98kG6L2bPTIHSrrVYtlJo/P91Gs2en8/jkJzsm3Jo1K211+YMP0hqC22wTsfHGqz/fVVVbm26vd95J95vttuu8xkcAADqaABAAAAAAMswtwAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGlUckXV0G6DI77rhjVxcBAACANeC5557r6iJAl1EDEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAA
ACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMPKu7oAAACsvoaGhsjlcrF8+fJYunRpNDQ0RE1NTdTW1kZdXV1ERNTV1UV9fX306NEjysvLo2/fvtHQ0FAYVlFREWVlZdGvX78oLy+PsjL/KwYAyAIBIADAOiwf6i1cuDBqamoKAeDy5csjSZI237N06dJYvHhxYZqysrKYP39+lJeXR5IksfHGG8eGG24oBAQAyAABIADAOmjRokVRX18fNTU1UVVVFXV1dbF8+fKiacrL0696ZWVlkcvlokePHpHL5SJJkqivr48kSaKhoSHKyspi2bJlERGF7qJFi2LgwIECQACADBAAAgCsQ5YuXRrV1dVRVVUV1dXVkcvlor6+vjC+X79+kcvlory8PNZff/1Yf/31o6GhIXr06BF9+vSJ5cuXR0NDQyRJEkuXLo0ePXpEWVlZVFVVxfLly6OqqirWX3/9GDBgQPTs2bML1xQAgI4iAAQAWMslSRK5XC4++OCDWLx4cSxcuLAwvGfPntGzZ8/o1atXDB48ONZbb73o0aNH9OzZM5IkiR49ehTNq1evXoW/+/TpU/i7X79+UVdXF5tssknU1tbGgAED1si6AQDQ+QSAAABrsSRJYuHChVFVVRVz5swpDO/du3f06dMnBgwYEOuvv35h2OooLy+P8vLy1Z4PAABrFwEgAMBaqKGhIRoaGmL27Nkxb968WLp0aURE5HK5GDBgQAwePDgqKioiIjynDwCAFRIAAgCsZRoaGqK2tjZmzpwZixYtKjTuscEGG0SfPn1i4MCBsd566wn+AAAoiQAQAGAtk3/WX3V1dURE9OzZM/r37x+VlZXRt2/fLi4dAADrGgEgAMBaZObMmbFgwYJYsmRJRESst956seGGG8bgwYNbNOgBAAClEAACAKwl5syZE/PmzSuEfxtttFEMGDAg+vfv38UlAwBgXSYABABYC8yePTs++OCDaGhoiIiIgQMHxoYbbhh9+vTp4pIBALCuEwACAHSx+fPnx/Tp0wv9+ef9rbfeepHL5bqwZAAAZIEAEACgC9XW1sYHH3xQ6K+oqIgtt9wykiTRy
i8AAB1CAAgA0EXq6uriww8/jPr6+ohIG/zYYostIpfLqfkHAECH8W9lAIAukCRJVFVVxdKlS2P58uVRVlYWW2yxRfTs2bOriwYAQMYIAAEAusD8+fNj/vz5sXjx4oiI2GSTTaJXr15dXCoAALJIAAgAsIYtXLgwqqqqYuHChRERMXjw4BgwYECUl3s6CwAAHU8ACACwBjU0NERtbW3Mnz8/ItJGPyoqKmK99dbr4pIBAJBVAkAAgDWorq4uZs6cGRERvXr1ioEDB8bAgQO7uFQAAGSZABAAYA1JkiQWLFhQaOG3vr4++vfvH2VlvpIBANB5fNsEAFhDli5dGlVVVbF8+fKISBv+0OovAACdTQAIALCGLFmypNDwR0VFRfTt21ftPwAAOp2m5gAA1oClS5fG4sWLC4Ff3759o6KiootLBQBAd+BfzgAAa8CyZcti/vz50dDQED179oxNNtmkq4sEAEA3IQAEAFgDqqurY8mSJRER0adPn6ivr+/iEgEA0F0IAAEAOtny5ctj2bJlhf4+ffpEr169urBEAAB0JwJAAIBOliRJofGPXr16RZ8+fbq4RAAAdCcCQACATrZo0aLI5XIREVFfXx99+/bt4hIBANCdCAABADpZjx49IkmSiIgYMGBA9OjRo4tLBABAdyIABADoZAsXLoyGhoaIiOjZs2eUl5d3cYkAAOhOBIAAAJ1s6dKlUV9fH7169YoBAwZ0dXEAAOhmBIAAAJ0sf8tvQ0NDoSYgAACsKQJAAIBOVlZWFrlcLsrKyqK+vr6riwMAQDcjAAQA6GT19fWFRkB69+7dxaUBAKC7EQACAHSyurq6iIhYtmyZW4ABAFjjBIAAAJ1s8eLFERHRt2/f6NmzZxeXBgCA7kYACADQyfKNgESEABAAgDVOAAgA0MnKy8sjIn0WYG1tbReXBgCA7kYACADQyXr16hUREUmSxPLly7u4NAAAdDcCQACATpbL5SIibQxkyZIlGgIBAGCNEgACAHSyvn37Ro8ePaKhoSGWL18eSZJ0dZEAAOhGBIAAAJ1s/fXXL9QCXLJkSeFvAABYEwSAAACdrKGhIcrK0q9d9fX1ngMIAMAaJQAEAOhkvXv3jt69e0dE2hBIfX19F5cIAIDuRAAIANDJevbsGT179oyIiOXLl8eSJUu6uEQAAHQnAkAAgE6Wy+Wib9++UV5eHsuWLYuqqiohIAAAa4wAEABgDRgwYED06dMnIiKqq6tj2bJlWgMGAGCNEAACAKwBPXv2jAEDBkQul4v6+vpYsGCB1oABAFgjBIAAAGtAWVlZ9O7dO3r16hVJkkRtbW3U1dV1dbEAAOgGBIAAAGvIeuutF2Vl6devJUuWRHV1dReXCACA7kAACACwhvTs2TP69+8fEWlrwLNmzVILEACATicABABYQ8rKymLAgAHRr1+/iEhrAc6ZMyeWL1/exSUDACDLBIAAAGtQnz59YuDAgVFeXh719fUxd+7cqKmp0SIwAACdRgAIALAG9ejRIwYOHBh9+vSJiLQW4IIFC9wKDABApxEAAgCsYT179oyNN944+vbtGxERc+fOjY8++kgtQAAAOoUAEABgDcvlctG/f//YaKONory8PCIiqqqqoqampotLBgBAFgkAAQC6SP/+/WODDTaIsrKyWLp0acyePVsICABAhxMAAgB0kV69esWgQYOiV69e0dDQEFVVVTFz5sxYunRpVxdtleVbNl60aJFbmgEA1hICQACALrT++uvHZpttVmgUZMGCBTFt2rRYsmRJF5ds5dXX18cHH3wQH374YcydOzfq6+u7ukgAAIQAEACgS5WVlcWAAQNis802i4qKisjlclFfXx+zZs2K2trari5eyRYvXhwffPBBVFVVxfLly6OmpmadrskIAJAl5V1dAAAA0pqA9fX1UVdXF7W1tTF//vyIiBg8eHD07t07evbs2cUlbF2SJFFTUxOzZ8+OqqqqiIioqKiIDTfcMPr169e1hQMAICIEg
AAAa4UePXrEoEGDonfv3jFjxoxYtGhRzJkzJ6qqqmKjjTaKDTfcMHr16tXVxSxIkiTq6urio48+iqqqqkJtvz59+sSQIUNi/fXX7+ISAgCQJwAEAFiL9OvXLzbddNOYNWtWVFdXR11dXcydOzd69OgR/fr1i379+kUul+vqYkZtbW3MmTMnPvroo4hIb2Veb731YtNNN40NNtigi0sHAEBTAkAAgLVMv379YqONNor6+vqoqamJZcuWxYwZM6J3796x0UYbxfrrr99lt9fW1dXFvHnzYsGCBbF48eKIiCgvL48NNtggBg0apOYfAMBaSAAIALCWKSsriw022CA22GCDmDt3bsyePTuWLFkSy5Yti9mzZ8eCBQti0KBB0a9fvygvL4/y8vJOqxWYJEnkcrmoq6uL6urqmDVrVixZsqSohd9NNtkkBgwYsFbdogwAQCMBIADAWmyDDTaIhoaGqK6ujgULFsSyZcti2bJlsXjx4sItwfmGQjpaPnSsra2NRYsWRU1NTSxfvjwi0mcWrrfeerHJJpu45RcAYC0nAAQAWIuVl5fHRhttFIMHD465c+dGTU1NVFdXx/Lly2PhwoWxcOHC+PDDD2PAgAGFWoM9e/aMurq6yOVy0aNHj3aXka/llyRJLFmyJHr16hULFiyIqqqqqK6ujoaGhkiSJJIkiYj0FuWKiooYMGBA9O3bt7M3AQAAq0kACACwDigrKyvc9tuvX79YtGhRLFq0KOrq6qK+vj6qqqqiqqoqIiLWX3/96NOnT/Tr1y+SJImePXtGWVlZVFRURE1NTZSVlUWvXr1i6dKlsWTJkoiIqK6ujvr6+qirq4uampro1atXLFu2rKgMvXr1igEDBsTAgQOjT58+JYWLAAB0PQEgAMA6okePHtG3b9/o27dvLF26NGpqaqKmpiYWLlxYFNblh3/00UdRXl4evXv3jkWLFkWPHj2id+/e0bt371i4cGGUl5cXAsR8DcC8ZcuWRXl5efTp0yc22GCDyOVy0bt376ioqIiysrKuWH0AAFaRABAAYB2UD/IGDx4cS5cujQULFhSeEZhvtKPpKyKivr4+Fi9eXGi9t2lDHvnwr2fPntGzZ88oLy+P/v37x4ABAwo1CAEAWDcJAAEA1nG9e/eOysrKqKysjPr6+kKDHUmSFBrtyN/Wmw8Hc7lc9OrVK+rq6qKhoSH69+8fZWVlhVp+y5Yt83w/AICMEAACAGRIjx49on///tG/f//Vmk95ua+JAABZ4V4OAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAKRbS5JEv379+vXr169fv379+vV3g37oznKJI4JubIcddujqIgAAALAGPP/8811dBOgyagDS7eVyOV1dXV1dXV1dXV1dXd1u0IXuSg1AujU1AAEAALoHNQDpztQABAAAAIAMEwACAAAAQIYJAOnWuroVKv369evXr1+/fv369evXv2b6oTvzDEC6tTFjxnR1EQAAAFgDXnjhha4uAnQZNQABAAAAIMMEgAAAAACQYQJAAAAAAMiw8q4uAHQlj8AEAAAAsk4NQAAAAADIMDUA6dbUAAQAAACyTg1AAAAAAMgwASAAAAAAZJgAkG4tfwuwrq6urq6urq6urq6ubra70J3lEkcC3dh2223X1UUAAABgDXjppZe6ugjQZdQABAAAAIAM0wow3ZoKsAAAAEDWqQEIAAAAABmmBiDdmhqAAAAAQNapAQgAAAAAGaYGIN2aGoAAAABA1qkBCAAAAAAZpgYg3ZoagAAAAEDWqQEIAAAAABmmBiDdmhqAAAAAQNYJAOnWmgaAuVxOv379+
vXr169fv379+vVntB+6s1zS9IiAbmbrrbfu6iIAAACwBrz22mtdXQToMmoA0q3JvwEAAICsEwDSrQkAAQAAgKzTCjAAAAAAZJgagHRragACAAAAWacGIAAAAABkmBqAdGtqAAIAAABZJwCkWxMAAgAAAFknAKRbEwACAAAAWecZgAAAAACQYWoA0q2pAQgAAABknQCQbk0ACAAAAGSdAJBuTQAIAAAAZJ1nAAIAAABAhqkBSLemBiAAAACQdQJAujUBIAAAAJB1AkC6NQEgAAAAkHWeAQgAAAAAGaYGIN2aGoAAAABA1gkA6dYEgAAAAEDWCQDp1gSAAAAAQNZ5BiDdWvMAUL9+/fr169evX79+/d2zf+DAgZEkSQwcODD23HPPGD58+FpVviz39+jRI7beeuvYaqutokePHp22POjOcokjgm5sk0026dD5DRkyJCIiZs2a1aHzBQAAOs+gQYNixx13jIceeigGDRoUw4YNi0GDBsW8efPiueee6+ritTB8+PAYOHBgof+dd96JefPmRUQUyp83f/78ePvtt9d4GVfG1ltvHZtvvnlEREyfPj1ee+21TlnOhx9+2CnzhXWBW4Dp1jo6/06SJLbbbrv48MMP44033oilS5d26Pxb86lPfSp+/vOfx4gRI2LZsmXx1FNPxWGHHRb19fWtTv/Vr341Tj/99Nh0001jwYIF8fvf/z4uvvjiwvif/OQn8eUvfzkqKirio48+itNPPz0efvjhTl8PAADoCoMGDYoddtghkiSJJEli7ty5MXfu3Bg+fHgMHz489txzz3jooYe6uphFBgwYEAMHDiyEfvmyN/970KBBRf1rq7bKD3QcASDdWmcEgEmSxMYbbxwbbbRRvPHGGzF9+vQOXUZT/fr1i7vuuit69OgRF198cWyyySZx5JFHxj333BP77LNPi+l33HHHuPLKK2P69Onxwx/+MPbaa6845ZRTYtmyZXHFFVfEd7/73TjqqKPi0Ucfjb/+9a9x3HHHxU033RRf+MIX4sUXX+y09QAAgK7QNPyLKP598NZbb0WSJDF8+PAYNmzYWleLbt68efHss8+2GJ4PMCMixo4dGxFr/62w06ZNK5TxjTfeWOvLC+sizwCkW2t+oV/dblP551iMGTMm1l9//U5Z3qGHHhrrr79+nHbaafGLX/wivve978U///nPGDNmTAwYMCCSJIk777wz7rnnnoiIOPvssyMiYpdddonrrrsuvvKVr0RtbW1861vfioiI4447Lj766KP46le/Gtddd11MnDgxcrlcfPe73+2U8uvq6urq6urq6uquye7AgQMLgd7w4cNjxx13jKaSJCm6hfatt96K+fPnx/Dhw9eK8me1W1dXF6+99lpMnTo16uvrO2050J2pAUi3liRJh15Ums4vb/DgwbHTTjvFW2+9FW+//XaHLu9Tn/pURET86U9/ioaGhohI/3s2ZsyYGD16dDz22GMxevTo6NGjRzQ0NMTHPvaxmDt3bixevLhQvo8++iiGDh0aDQ0N0a9fv/j73/9eWI/p06dHkiQxcuTIwvy7+suBrq6urq6urq6u7qp2Bw4cWPR8vPy4pv0NDQ0xfPjwmDdvXsybNy/efPPNGDt2bAwYMCDmzZu3VqzHW2+9Vfi7I6br6m6PHj1i5MiREZH+nmloaOi05UF3JQCENWTLLbcs3Bacf1ZHR8yzoaGh6Hl/r776akREbLbZZhERse2220ZZWVrZd8MNN2zx4Nv//Oc/8bGPfSw+8YlPRC6Xa9GASU1NTaEGIwAAdBf5EHBtVGq51tbyNzdy5MhCIyAREVOnTu3C0kA2CQDp1jr6v0BN/7O2ovEdtdz11luvMN+85cuXR0TE+++/H0mSRE1NTWFceXl59OjRo2j6+vr6aGhoiL59+xamaT6+urraf8wAAFjnrcz39Xxtv/xr4MCBhWfr0bGa11D02wM6nmcA0q2tyWrlb731Vjz55JMdetvAnDlzoqysLAYOH
FgYPmTIkIiI+Mc//tFi+sWLF8fgwYOLhg8aNCiqqqrinXfeiYiILbbYomh8375948033+zU7aWrq6urq6urq6u7JrttaT4+SdLvyxGx1tz+G5E+yzDfwm9HTNfV3ddffz2mT58e//nPf4oaBFlTnzt0B2oA0q0lSec/A3DOnDkxbdq0qK6u7rDl5LvPPvts7LrrrvG5z30ubr/99oiI+NznPhe1tbVRW1vbYvp///vfse2220ZFRUUsXLgwIiJGjBgRr776aixatCiWLVsWW265ZWE9Ro4cGb169Yrnn3/eMwB1dXV1dXV1dXXX+e7cuXML/RHpI3WaSpL0GYBvvvlm4fl5+efRzZkzp8vLn+/mGyWZM2dOh0zX1d2GhoZ45ZVXoqnOWh50V2oAQiepq6uLqVOnxnPPPVcI/zraVVddFUmSxOWXXx6f+9zn4jvf+U5st9128eyzzxamueeee+LBBx+MiIirr746crlc3H///fFf//Vfccstt0Tv3r3jjjvuiIiIyZMnR2VlZfz85z+P//qv/4pJkyZFkqQtCQMAwLpu3rx58dZbbxVezzzzTJvTRKQB4ZZbblnop3P06NEjRo0aFaNGjYoePXp0dXEgk3KJGJxuLH/rbEfZeOON49Of/nR88MEHMW3atFi6dGmHzr81X/3qV+NXv/pV5HK5iIj48MMPY/vtty8s+7333ouePXsWGgW59tpr46CDDiq8/6GHHoqvfvWrERHRs2fPeOqppwotoyVJEt///vfjV7/6VaevBwAAdIVBgwbFuHHjIiLiL3/5S2F4PvxrPnxtMG7cuBg0aFChkY+33nqr8PegQYMK5c5P01rQuTYZNWpUfOxjH4uItJHCzmoEZP78+Z0yX1gXCADp1gYMGFAIziLSwGt1+ocMGRK5XC5mzpzZIfMrtb93796x1157xdSpU+Ptt99ud/rBgwfH7rvvHo888kjRRTA/fsstt4xRo0bF/fffH3V1dZ1efv369evXr1+/fv36u7I/HwI+8MADkcvlCgHb3Llzi+6uWVvKu+WWWxae7ZfL5QoBYJIkMXjw4EIAmCRJzJ8/v1CDcW0pf/P+fACYJElMnz69EAB29PIEgHRnAkC6tQEDBnR1EQAAgLVAvrZcPgxcF2rOZUWPHj1iq622ioi0QZD6+vpOWU5VVVWnzBfWBQJAurUNNtigq4sAAADAGrBgwYKuLgJ0Ga0A063JvwEAAICs0wowAAAAAGSYGoB0a2oAAgAAAFmnBiDdWj4A1NXV1dXV1dXV1dXV1c12F7ozjYDQra2//vpdXQQAAADWgJqamq4uAnQZNQDp1rr6P1C6urq6urq6urq6urq6a6YL3ZkagHRr/fr16+oiAAAAsAYsWrSoq4sAXUYjIHRr8m8AAAAg6wSAdGsCQAAAACDrPAMQAAAAADJMDUC6NTUAAQAAgKxTAxAAAAAAMkwNQLo1NQABAACArFMDEAAAAAAyTA1AujU1AAEAAICsEwDCGiJsBAAAKJbL5bq6CNAtCABhNWy++eax1VZbRa9evbq6KAAAAJm0bNmyeP3112P69OldXRRYZ3kGIN1ae7Xy2hsv/AMAAOhcvXr1iq222mq1f79Bd6YGIN2eiwgAAMC6we83WDVqAMJqeO2112LZsmVdXQwAAIDMWrZsWbz22mtdXQxYp+US8TjdWHm5SrAAAADdQV1dXVcXAbqMGoAAAAAAkGECQAAAAADIMAEg3Vr+DnhdXV1dXV1dXV1dXV3dbHehO/MMQLq1Hj16dHURAAAAWAPq6+u7ugjQZdQABAAAAIAMEwACAAAAQIYJAOnW2ns2hPHGG2+88cYbb7zxxhtvvPHZGA/dmWcA0q2VlcnAAQAAuoOGhoauLgJ0GekHAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhpV3dQGgK2kEGwAAAMg6NQDp9nK5nK6urq6urq6urq6urm436EJ3lUtUgaIbcxEAA
ADoHsQfdGdqANLtdfV/oHR1dXV1dXV1dXV1dXXXTBe6KzUA6dZcBAAAALoH8QfdmRqAAAAAAJBhWgGmW/MfIAAAACDr1AAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDyru6AKuipqYmnn766XjllVfixRdfjAULFsTWW28dW2+9deyxxx6x6aabtnhPVVVV/N///V+h//Of/3yMHDmy08s6e/bsuPfee+PZZ5+NDz/8MLbaaqsYMWJE9O3bN+bOnVuY7tvf/naUl3fsx/H444/HK6+8stLv22+//aJ///5x8803F4Z97nOfi6222qojiwcAAADAGpBLkiTp6kKsjMcffzwOPfTQmDFjRpvT/PznP4/jjjuuKFB7/fXXY+utty7033TTTXHooYd2aln//e9/xx577BHvvPNOi3E77rhjPPfcc4X+mpqa6NevX4cu//jjj49f/epXK/2+Bx98MLbYYosYMWJEYdjvf//7+MY3vtGRxQMAAABgDVinbgH+/ve/H7vvvvsKw7+IiJNOOim+8pWvRENDwxoqWetuvvnmVsO/XXbZJXK5XBeUCAAAAIDuZp25Bfjhhx+OCy+8sGjYqFGjYrfddovZs2fHH//4x6Jxd999d1x77bVx7LHHrsliFnnzzTeL+h955JHYYostor6+vkXtw84IBEePHh0TJ04sGvbqq68WhZITJkyIQYMGFU2z0UYbRe/evYtqAG6wwQYdXj4AAAAAOt86cQvw8uXL41Of+lS88cYbhWE//vGP46yzzooePXpERER1dXUcf/zxRc+tGzZsWLz55ptRVlbWJbcAf/azn43HHnssItKw8tVXXy2MO/zww+P3v/99RERUVFTEwoULO7UseT/60Y/ihz/8YaH/+eefjzFjxqyRZQMAAACw5q0TtwDfeuutReHfxIkT4/vf/34h/ItIQ7QrrrgiKioqCv3bbrttUUMbzf3jH/+Ir3/96zFkyJDo379/fOlLX4o//OEPbU4/derUOOqoo2KbbbaJXC4X22yzTRxxxBExZcqUoumuu+662G+//eL5558vDJs+fXrst99+sd9++8W0adNihx12KIwbP3580ftnzpwZZ555ZowbNy769+8fuVwuhgwZEjvvvHP86le/ikWLFrWzxVbfhx9+WCjvfvvtF48++mhh3B133FEYftppp8XSpUvj8ssvj8997nORy+Viu+22i5NOOqnVbZ8kSdx7772x7777xvDhwyOXy0Uul4vhw4fHd7/73fjnP//Z6esGAAAA0J2sE7cANw+Fzj777Fanq6ysjEceeSR69+4do0aNKgoIm/v5z38ezz77bNGw++67L+67776YNm1aUS25iIjrr78+jjrqqKJhU6dOjalTp8bvfve7uOCCC+IHP/hB5HK5+Ne//hX33HNP0bTV1dWFYWeddVaMGTOmEFY2DQBfeeWV2HbbbVuUd/bs2TF79uyYMmVKPP300/G73/2uzXXrCDU1NUXr8JWvfKXw95tvvlkYN3To0Jg6dWo8+OCDhfEvv/xyvPzyy/GHP/whHnroodhuu+0iIg3/vvnNb8ZNN93UYnnvvPNOX
HHFFXHFFVfEq6++GqNGjeqsVQMAAADoVtaJGoDTpk0r6m8tIMvbcccdY9ttt11h+BcRhfCvoqKiEMTlnXvuuTFnzpxC/8svv9wi/Gvuhz/8Yfz5z39e4TRNTZgwIRYuXBgLFy6MCy64oDD8yCOPLJpuzJgxsc8++xSV8fe//32LZx52lRkzZhSFf03Nnj07vvGNb0R9fX1ERPzlL38pCv8qKipin332aRH2HXbYYbFs2bLOKzQAAABAN7JOBID/+te/Cn9XVFREv379OmS+1157bcyfPz/mzZvX4nmATz31VOHvk08+uWjc7bffHsuWLYsXXnihKLw6++yzo76+Ps4///x49913i56tN3r06Hj33Xfj3XffjbFjx7ZanpqamqJaiVdffXU8//zzcf/998fMmTMLjXKMGTOm6PbirlZRUREPPvhgNDQ0xH/+85+i9Xv55Zfjtttui4iIv/71r0Xvqaqqivvvvz9effXVuP766yMircW5ySabFD0vEQAAAIBVt04EgDNmzCj83adPnw6Z58SJE+Poo4+OHj16RHl5eZxwwglF4+fNmxcREVVVVYWGPCIi9t9//zjooIOiZ8+esf3228cpp5xSGDd16tR49dVXY+DAgfGJT3yiKKisqKiIT3ziE/GJT3wievbs2WqZmtdaPP744+Nb3/pWTJo0KRYsWBBPP/10LF26NJ5//vkWLSJ3pQsvvDD22muvyOVysfnmmxcaN8l7/PHHI6K4JeHq6uoYO3ZsXHbZZfH000/H17/+9Zg9e3bMmjUr7r333th+++3X6DoAAAAAZNU68QzAESNGFBoBmT17dtTX17d7i2978s+lyxs0aFBR/9KlSyMi4t133y0aPmfOnDjrrLMK/R988EHR+OnTp8fo0aNXqUx9+vSJPfbYIx555JHCsN/+9rfx29/+NiLSmn8HHHBAHH300TFkyJBVWkZn+MIXvlDUv/XWWxd9Zq+//npEpK0iN/XCCy/ECy+8EBFpQDpx4sQ47LDDWswPAAAAgFW3TtQA3GabbYr6Z82a1ea0jz32WNx8881Fz/Brzcc+9rGi/rZq5b333ntF/VOmTIlLLrmk8GreoMW///3vFS63PTfddFMMGzas1XEvvPBCnHPOObHxxhvHXXfdtVrL6UiDBw9uMWzo0KGFv/Mh6s477xwXX3xxq/Oorq6OW265JfbZZ5/Ya6+9ora2tnMKCwAAANDNrBMB4NZbb13Uv6IGMH7wgx/EYYcdFhtttFF87nOfi/fff7/V6ZrfSpzL5VqdrnkDIcOGDYuJEye2+Ro4cGApq9SmTTfdNKZOnRq33npr7L333m1Od8ABB0RVVdVqLaujzJ07t8WwpgFs0zDwzDPPjBdeeCFOP/30ouFNTZ48OS677LKOLygAAABAN7RO3AK87777Fj3z7sc//nEceuihLcK2++67L6ZMmVLof+WVV1b7VtmPf/zjRf2f/exn4ze/+U2hf/HixfH+++/HFltsEeXlq7856+rq4oMPPoiNNtoo7r777kiSJJ599tmYPHly/PrXv47Zs2cXpn3llVdi5513Xu1lrq7HHnssttxyy0J/VVVVvPzyy4X+kSNHFv5esGBBJEkShx12WFx66aXxzjvvxJQpU+IPf/hD3HfffYXp8s8NBAAAAGD1rBM1ACdMmBCHHXZYoX/27NkxevToeOihh2LBggVRVVUVv/nNb+L//b//V/S+U089dbVDuU984hNRWVlZ6L/99ttj/vz5hf6TTjopRowYET179oztttsuXnrppVVe1g033BA9e/aMYcOGxec///n4+c9/Huutt17ssssuccEFF8T5559fNH2vXr1WeVkd6fvf/3688847ERHR0NDQopw77LBD1NfXx/Dhw2PAgAGxww47xIQJE6KqqiqGDRsWhx12WNx9991FtS2XL1++RtcBAAAAIKvWiRqAEREXX3xx0fP2ZsyYscJbZEeMGBHf/va3V3u5PXv2jHPOOSdOP
PHEiEifVTd69Og49NBD47XXXou77767MO2SJUtaPK9wZTRfnzPOOCOeffbZGD9+fEydOjVuv/32ovGr2thIR5s9e3Z8+tOfjj322CPefffdotp/lZWVceSRR0aPHj3iS1/6Uvz85z+PiHQ7/td//VcceOCBsd5668VDDz0U1dXVhfdNnDhxja8HAAAAQBatMwHgpptuGq+88koccsghRQFTayorK+Ohhx5a7efx5X3rW9+KJ598Mm655ZaISMPH1hqz+P3vf79aNQ433XTTuPHGG+Mb3/hGYdikSZNi0qRJLaa9++67Y7311lvlZXW06urqojA071e/+lX069cvIiLOO++8ePLJJ+PZZ5+NiIg33ngjLrroohbvGTt2bPzP//xP5xYYAAAAoJtYJ24Bzttmm23imWeeiXPPPTdGjBjRYnxFRUX85Cc/iTfeeKPFs/uaN/LRo0ePov6ysrI2+9dbb724+eab42c/+1mrLfROnDgxXnrppRg/fnyby2g+/7Ycdthh8eijj7ZZu3H33XePKVOmxH777VfS/JprXo62Gj9pPnxF5Z86dWrsvvvuRcOGDRsWTz75ZHz5y18uDBs4cGA89NBDcd5557VoXCUi/fx+/OMfx8MPPxx9+/Ztb1UAAAAAKEEuSZKkqwuxqhYtWhSvvfZa1NfXF57V11ag1ZEWLlwYr732Wqy33nrxsY99rMNqGja3YMGC+OCDD2Lu3Lmx4YYbxuabb16oTdeVLrroojj77LML/QsXLoyKioqYN29evP766zFs2LDYeOONVziPurq6mDlzZsyYMSPKyspi6NChsfHGG5cclAIAAABQmnU6AKRrtBUAAgAAALD2Ud0KAAAAADJMAAgAAAAAGbbOtALM2mPLLbeML37xi4X+nj17dmFpAAAAAFgRzwAEAAAAgAxzCzAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEACgG/voo49ixowZUVdX19VFAQCgk6wTAeAOO+wQuVyuzddxxx1X8rzOP//8yOVyUVtb22nlTZIkJk2aFP/85z8jImLJkiWRy+Xi3HPP7fBl1dbWxkUXXRRLlizpsHk2L//a5vLLL49cLhfz589vdfwFF1wQuVwuFi9e3OllGTduXHzuc58r9D///PNxxx13FPp33XXX2HnnnTu9HB3h8ccfj1wuFw899FBXF2WF1uTnW6qnnnoqcrlc3Hvvva2Or62tjVwuF+eff36HLrf58f/oo49GLpeLhx9+uMOX0xnlb03zY6i5VdlPO+M8uTqan2OXLVsWuVwuzjnnnDVelqbnsAceeCByuVw88cQTa7wca4s1dR5s7zrWno46Dy5evDh23nnnqKysjM033zz+/ve/r9b8IiJmzZrV6ne14cOHx/HHHx/vvvtu0fSf/exnY6eddlrt5a6Nmh/rnXWOXrhwYeRyubjwwgtXuWwdpb1z+Jry4x//OHK5XNTU1KzS+I6yNlx/9t1339hhhx26bPkr0l7ZOmM/7ajjcE3tQwAdaZ0IACMiKioq4rzzzmv1tc8++5Q8nyFDhsSYMWOirKzzVv2JJ56Igw8+OD766KOi4Q0NDR2+rCuuuCLOPvvsqK+v77B5tlX+tUV+OyZJ0ur4/PC2xnek+vr6QnmWLVsWO+64Y7z00kuF8Q0NDR362XSmioqKGD9+fPTv37+ri7JCa/LzXVnt7ZMdfQ5ofvx31nI6a77NtXYMNbcq+2lnnCdXR/NzbFlZWYwfPz423XTTNV6WpuewvLXx2FpT1
tR5sL3rWHs66jz4wAMPxJQpU+LII4+Me+65J3bcccfVml/TMo0dOzbOO++8OPfcc+P444+P4cOHx69+9asYNmxY/Pvf/y5Mv/XWW8c222yz2stdGzU/1temc3RnfNcr5Ry+prR3jG2yySYxfvz4Tv09ELF2XH8aGhrW2tq97ZWtM/bTjjoO19Q+BNCRyru6AKXaaKONOqQG3bHHHhvHHntsB5SobZ39I7mzl7Umy8/aY8yYMfHkk092dTFYCd3xWF2V/XRt207Ny1NeXu7YW0t0t/Pg7NmzIyLi9NNPj6222qpD573zzju3+N52zTXXxHHHHRdHHnlkTJ48OcrKyuLqq6/u0OWuTda2c09Ta3PZ1oSjjjoqjjrqqE5fTnffzqtrbd5+a2ofAuhImfqXxXPPPRfjxo2LRx55JL70pS9FLpeLkSNHxi9/+cvCf3uuvfbaGDduXCxdurTwvjvuuCPGjRtXmP773/9+oar+aaedFrvuumssW7asaFnHHXdcfPWrX21Rhqeeeiq+/e1vF6Y588wzC+OqqqriW9/6VvTv3z8233zzOPnkk2P58uWF8TU1NXHyySfH8OHDo3///rHXXnvFCy+80Ob6Xn311XHVVVdFRHqr6R/+8IeIiPjzn/8cn/vc56J///6Ry+Vihx12iPvuu6/wvtra2jjxxBNjyJAhkcvlYty4cfG73/2uzfKv7DbIa68cxx13XFxwwQVx2WWXxciRI6N///6x3377xXvvvVc0n+uvvz523nnn6N+/f3z5y1+O999/v81lNnXPPfcUbh//7Gc/G3/729+Kxr/99tvx5S9/Ofr37x9DhgyJb3zjGzFr1qzC+OXLl8cVV1xR2DdyuVzsu+++8frrr7dYVv42qoh0H9t1110L4+rr6+PSSy8tfK5f/vKX44MPPoiIiJtvvjnGjRsXL7/8ctH8rrvuuhg3blzMnTu3xbKuueaa+O///u+4/PLLo3///rHvvvtGTU1NNDQ0xC9/+cvYbrvtCtv71ltvLXpvdXV1fPe7342RI0fGkCFD4tRTT41XX301IiJeeOGFGDduXDz11FMRkX4+Z599dpx11lkxZMiQGD58eJx11lmxaNGionk+/PDDseuuuxaOn3PPPbdoX3n33Xdjv/32i/79+xf266effrqNT23F+2dT7X2+7e1/bW3HUo7DJ554Ir785S9HLpeLnXfeOaZMmdLm+jQ1c+bMOPjgg6N///4xcuTIuPLKKyNJkqirq4tdd901TjzxxKLp8/vV5Zdf3mJebR3/ERGvvfZafOELX4hcLhfbbbddXHPNNUXvXdlzTXvlzytlH7zqqqti5MiRhf3l7LPPjqVLl67wGGqqtf10ReeRtrZTe8d/a/vH3/72t3avMRER//rXv+Lggw+OzTffPHK5XGy++eZx+eWXR319favn2Lq6uhg3blxREPLPf/4z9t1330L5DjvssMJ5IyI9R3zpS1+K++67L3baaafCsfLoo48Wba/2joO2vPLKK60ee02vs809/fTTMW7cuLjxxhtjyJAhscMOOxRu22rvPBER8b//+7+Fc/3Xv/71+Mtf/lIYt2TJkrjwwgsL+84OO+wQN954Y9H7lyxZEueee25ss8020b9///j2t78dl1xySey7774RkdZOGjduXNx2221xzDHHxJAhQ2LIkCFx4oknFm6lbbp/TZs2LcaNG9fq69RTTy0st5R1W5Xr2GuvvRZHHHFEDBkyJLbZZpv4yU9+EnPmzCmaZnWuc9///vfjvPPOi4iIr371q4V1ausY7QjHHntsHHnkkfHII4/EtGnTIiI9Dpr+iG5v+Su6jkWk56n859v8mrN48eLYaaedCuud99FHHxUdg+2dI9u6fjS1ou+DnXWObmplz0MRa+YcHrF6n+HKTNPUm2++GTvvvHPst99+sWjRovjtb38b48aNi8WLF5d0boho/xzTXGvXn1U9T5ZaxqVLl8Y555xT2LZnnHFGi+N3ZY/xU
r4Pl/KdvpSyNdXWflrK8dHePhax4uOwlN+UTfeh/Gf0/e9/v7DM0047LS666KL4+te/HhERixYtinHjxsX1119fVI5TTz21ME3E6h2HAO1K1gFjxoxJhg0blixbtqzVV95f//rXJCKSiEhGjRqVnH766cmwYcOSiEjuv//+JEmS5Ic//GESEcmiRYuSJEmSG264IYmIZMyYMclll12WHHrooUlEJPvss0+SJEly3XXXFb0/SZLkww8/TCIiOeecc1qU9c0330wOO+ywJCKSQw45JLn11luT2traQrmGDRuWnHHGGcmYMWOSiEh+8IMfJEmSJHV1dcnYsWMLy77ooosKZX/ppZda3S6TJ09OdtlllyQiku9973vJk08+mTzyyCNJRCSjR49Ozj///OToo49OKioqkohIZsyYkSRJkpxxxhlJRCRHHXVUcvHFFxeWe++997Za/pXdBkmSlFSOCRMmFLbLYYcdVljumDFjCvOZNGlSEhHJHnvskVx66aWF7RYRydy5c1td9nnnnVeY5pBDDkmOPvroQv/s2bOTJEmSGTNmFMpz9NFHF/aLoUOHFvaN888/v+jz2H///ZOISEaMGJE0NDQU9s3dd989Wbp0aXLOOeckEZFMmDAhufDCC4vWsaKiIjnxxBOTiRMnJhGRjB8/PkmSJHnttdeSiEjOOuusonUYNWpUYZrmfvCDHxTWZ/To0YXtlf9cR40alVx44YXJnnvumURE8utf/zpJkiRZvnx58sUvfjGJiGT33XdPzjrrrKSioiIZMWJEsnz58uTRRx9NIiJ54IEHWpT9yiuvTM4888zCZ5V3zz33JBGRVFZWJmeffXZhW3/ta18rTDNmzJikoqIiOeOMM5If/vCHSWVlZRIRyUcffdTq+q1o/yz18y1l/2ttO5ZyHL755ptJRUVFMmzYsOSSSy5JDjzwwMJ8/vznP7e6TosWLSo6N1122WXJ3nvvnUREYV855JBDkohI5s2bV3jfnXfemUREMnny5BbzbO34b3oO3GWXXZLvfOc7he394IMPJkmyaueaUspfyj547733JhGR7Lnnnsmll16aHHDAAUlEJGeeeWabx1Bzbe2nbZ1HWttOpRz/re0fpVxj5s+fXzgmvvOd7yRnnXVWYZpJkya1eo5dunRpoXxJkiSvvvpq4di74IILCsdeZWVlYf9oehzsueeeyamnnlronzNnTsnHQf4cliRJcv/99ycRkTz++OPJ8uXLk8rKymTs2LFF2/+UU05JIiKZP39+i8/mwQcfLJRhxIgRSUVFRTJz5sySzhMXX3xx4Rr5wx/+MBk1alQSEclbb72VJEmSHH744UlEJAcccEBy+eWXFz73Sy+9tDCPE044IYmIZOzYscmPfvSjwr5fWVmZJElSdC2uqKhITjrppMK+ceaZZ7bYv2bMmJGcfvrphdcZZ5yRjBgxIomI5Dvf+U6SJKWdA1flOvbhhx8mQ4cOTSIi+eY3v5kcc8wxhf21+ee/qte522+/vXAcn3LKKclNN920wmO0VPnvB6eeemqr4/PntbvvvjtJkiTZZZddCvtZe8tv7zq2aNGiwmd0yimnJBdeeGFhO06ZMiVJkiT54he/mFRUVCRLliwplOnaa69NIiJ58sknSzpHtnUdbqq1Y72zztFVVVVJRCQ/+tGPkiRZtfNQkqyZc3hHfIalTJP/Drdw4cLkP//5TzJ06NCkoqIiefXVV1uML+XckCTtn2Oaa+36s6rnyVLLeOKJJyYR6Xeon/zkJ4VzwOjRo9v9DNtSyvfhUr7Tt1e25lrbT0s5Ptrbx0o5Dku53jfdh5ruHyeddFLyi1/8ojDPUaNGJUnS8jjN23vvvQvTJMnqHYcA7VlnAsD8Sbi114svvpgkSePJ+oADDii8Nx+wnH766UmSFAeAdXV1hQtCbW1t4T35L9qTJ09O5s6dm0REcvjhhxfGX3311UlEJK+88kqr5c3/gMhfRPIX7crKykLokf/Bl//hd
dtttyURkZx//vmF+Xz00UdJRCT7779/m9smf/GpqalJkiRJjj/++CSi8cdd0/LmA5QxY8YUXWxnzpyZ7LLLLskNN9zQavlXZRuUUo78l4UXXnihME3zEKSioiIZPXp0snz58iRJ0i99+S957QWAF198cWHYZZddlkRE8thjjyVJkiTHHXdcEhHJo48+Wpgmf0G98sork4aGhmTEiBHJ6NGjk7q6usI0Bx10UFH5mv54zn+m+VC36Trmv5AmSZLss88+SUQUfnyMHTs2qaysTOrr65MkSZKXX345iYjkmmuuaXX98j88rrjiiiRJkqShoSF57733ivanJGn8EVFRUZHU1tYmd911VxIRyS9+8YvCNHfffXcSEcmNN97YZrCS3w+SpHF/e+aZZ5KGhobCl6GqqqrCNPmw4vnnn0/mzZuXRERy2mmnFcZPnjw52XPPPZOnn3661fVrb/8s5fMtZf9rbTuWchzm99Gm8z744IOTiNICwPy+U1dXl4wfPz6pqKhIFi1aVAhf8uuZJOn+VllZWdj/m2t+/OfPgfvss09hv33qqaeSiEguuOCCJElW7VxTSvlL2QfzX2rzAVVDQ0Ny8MEHJ6ecckqSJK0fQ821tZ+u6DzSfDu1d/wnSev7RynXmFtvvTWJiOSuu+4qTJM/ps8444yidcgfW80DwPyX+TfeeKMwj1tuuSWJiOSHP/xhkiSNx0HT4zn/z5p77rknSZLSrwetBYBJkiRnnXVWEhHJm2++mSRJ+qOqoqIiOfjgg1v9bPI/bCdOnFjYZqWcJ/L730EHHVTYb99///0kIpIjjzwyefHFFwt/5y1fvrwQAMybNy/517/+lUSkYVnz7d48AKysrEyqq6uTJEmSJUuWJEOHDi2cc5rvX009+eSTSUQabixZsqSkdUuSVbuO5QPdpvv1aaedVvg8OuI6lyRJctVVVyURkfznP/9JkiRp9xgtRXsBYH475sPbpgFge8tv7zr2y1/+MomI5Pe//31hfP7cNGHChCRJkuT2228vOgbyZRg2bFjJ14HWzg+taX6sd9Y5unmwsCrnoTV1Du+Iz7CUafLn/TfeeKMQtL388suF6VsLAFd0bijlHNOa5tefVT1PllLG/PWo6fGaL2N+mpU9xkv9PtzetbiUsrWm+X5ayvHR3j5WynFYyvW+6T70xhtvtDjv5f+ZtzIBYEcchwArss48A7CioqLN1n432mijov4DDjig8PeIESMiIq0K3txrr70W1dXVcd5558V6661XGL7ffvvFtddeG88880x8/vOfj4MOOih+97vfxVVXXRV9+/aNG2+8MUaPHr3SD63ea6+9YsMNN4yIiF69esUXv/jFwoOS//GPf0RExKabblrUKtWIESNavdWqLVdddVVceeWVEZHe7vDGG2/Ec889FxFRqKK+/fbbx29/+9vYa6+94uCDDy7c2taWQYMGrfQ2KKUcERGVlZWx/fbbF/q33377uOWWW6KmpiaWLFkS1dXVccQRR0R5ebqr9u3bN4488si44IIL2t0W//3f/134e7fddouIiOnTp0dExF//+teISKvrN28F7KmnnoqTTz45pk2bFsuXL48lS5bE22+/HVOnTo2ZM2dGRHqb6sCBA9stQ0S6737mM58p9H/+85+PBx54ID744IPYYost4ogjjojjjz8+nnrqqfjMZz4TkyZNioiIAw88cIXzzd9yksvl4vnnn4+IiFGjRhWtzyc/+cl49tln45133incZvzNb36zMH7ixInx4YcfxsYbbxyPPfZYq8vZZZddCn8fcsghce6558bLL78cH/vYx+Kdd96JCRMmxLPPPluYJr9dnnvuuRgzZkwMHTo0fvrTn8a8efNiv/32i89//vMrbGGz1P1zRZ9vqftfRPF2LOU4fO6552LixImx2WabFcYfffTRcfvtt7e5Tnl77713Yfv06NEjjjzyyDjmmGPirbfeis9//vNRUVERt912Wxx++OFRVVUVkyZNi
tNPP72w/5fqwAMPjB49ekREFFrXyz90f3XONSsq/1tvvRURK94Ht91224iI+MxnPhNHHnlk7LvvvkW3Lq+qFZ1HWjtOSzn+85ruH3krusZ87Wtfi6985StRVlYW06dPj7feeiueeeaZiGi577UmSZJ4+OGHY+LEifHJT36yMPwLX/hCRESLVlqb3nqW3775WxFX5jhozSGHHBIXX3xx/PGPf4wzzzwzHnvssaiuro7DDjtshe/bb7/9IiLdZrNmzWr3PJHfdkceeWRhv910001j1qxZMXjw4LjuuusiIoqe31teXh4HHHBAXHLJJfHqq68WbjPLLzu/PYYOHdridtwvfelLsf7660dERO/evWPbbbct3I7alunTp8f+++8flZWV8cc//jF69+5d0rptsskmq3Qd++c//xljx44t2q9/9KMfxTnnnBMDBgwoDFvd61xznXWMNpXfP1t7cH57y2/vOpZ/LMkhhxxSGP/xj388xowZE1OmTInly5fHF7/4xYiImDRpUnzxi1+M6dOnxxNPPBE/+tGPSr4O5LV2fihFZ52j81blPFTK94iO2D864jPMfydY0TR5e+yxR8yYMSP+/Oc/F8rflhWdG1588cWIKO0cU4qVPU+OGjWq3TK+8sorERFx6KGHFpVxwoQJhfPsyn6GuVyu5O/DK7oWl1K2UpRyfLS3j+WnW9FxmFfqb8p//etfEZEee3mjRo2K3XffvfCs1VKsqeMQ6L7WmQBwo402iksuuaTkafPKysqioqKi1da38s/BafpDPqIx8JgxY0ZERBx22GExadKkeOihh2L06NHx1FNPFX5UrYzNN9+8qL+ioqLwpeHdd9+NiDRIaE1NTU3hgr8i8+fPj5NPPjluuummwrDKysqiaS677LKYM2dO3H333TF58uSISL8g/e53v2tRxryV3QallCMi/cLWVN++fSMiff7F22+/HRFpy81NtVXG5pq2qNmvX7/CfCMi3njjjYhIA43m8st9+eWX44QTTognnniiMK6ioqKkZTfV/MHq+c8xX5YDDzwwjj/++Ljjjjti/PjxccMNN8T+++9fCIvbMnTo0MLf+S8sV199dasPVJ8xY0ZMnTq1xTrkcrnYeOON21zG7rvvHn369Cn05z/DN998s/Ajc8qUKbHnnnu2eG++TA888EB8/etfj9/97neF54kde+yx8dOf/rRo3nml7p8r+nxL3f8iirdje8dhVVVVvPHGG7H77ru3OY8Vyf9Az9tkk00iIuL999+P0aNHxzHHHBNXXHFFzJo1qxCSNv0yWaqm57Ty8vKic+DqnGtWVP5S9sFDDjkkXnvttbjwwgvjrLPOirPOOiuGDRsWN9xwwwqfF9WeFZ1HWlPK8Z/X2me7omtMQ0NDXHjhhXH55ZcXfiQMGzas1FWJ2traqK6ubrHcgQMHxtixY1s8I7VpWfLHU74sK3MctGbbbbeN0aNHx4033hhnnnlm3HrrrVFRURF77bXXCt/3sY99rPB3qeeJiGjRCnK+rB9++GGr43fbbbe45JJL4v333y98puPGjSuaZq+99op77723aFjza8r666+/wh/wNTU18aUvfSlmz54dzz//fOH9pazbql7Hnn/++RbbuW/fvoV9O291r3PNddYx2lT+M//EJz6x0stv7zr2/vvvx7Bhw1r80yT/jLDZs2fHZpttFscee2xcc801cfXVV8cf//jHiGg815Zyjswr9dzfXGedo/NW5Ty0ps7hHfEZljJN03JHRPz617+OiRMnrrBsKzo3rMw5phSrep4spYzNK0Z8/OMfLwRwq/IZlvp9eEXX4lLKVopSjo9Sv++u6DjMK/U35ZtvvhkR0eK7+yc+8YmVCgC78rsU0D1kqhGQvFKbY8+f1BcsWFA0vLa2NiIihg8fHhHpxb2ioiL++Mc/xl133RUREQcddNBKlyv/X6bWDB48OCIiXnrppfjwww9bvPJf7Nvz7W9/O2666aY45ZRTCrXMfv/73
xdNM3DgwLjrrrti9uzZcdNNN8X+++8fjzzySJsX04iV3wallCNixdsk/8Om+edT6n8KV7QfVFZWxtChQ1vd1g888EAsXbo09txzz3jxxRfj0ksvjb///e+xcOHCOOGEE0padlMrWsd8Wfbff//4v//7v3j22WdjxowZ7dawiYiiL775/75ec801ra7TbrvtFoMGDYqIiLq6uqL5PPXUU4Uf2O2ZP39+RERsscUWhVoo3/zmN1td5llnnRUREZ/61KfipZdeitdeey2uvPLKGDNmTFxzzTXx85//vNVllLp/rujzLXX/iyjeju0dhxtssEFUVlZGVVVV0TyaP/y9VPntmf9Smq/NcO+998Yf/vCHGDFiRIwZM2al57uibdNR55rm5S9lHywrK4uf/OQnsWjRorj//vvjuOOOi3feeSd22223QsNLq6K9Y6y59o7/plqrfbmi7XvNNdfEueeeG7vuumvcfvvtMWPGjELNkVJaM+zbt29UVFQUtm1TCxYsiK233rrksqzMcdCWI488MqZOnRovv/xyXH/99XH44YdHr169VvieptuslPNEft9pfq6fOnVqTJs2rRAEtnWt3mKLLQrTNL8+tHa9WJn9paGhIb7xjW/Eyy+/HLfeemvR8VjKuq3qdWyjjTZqsQ8sWrQonn766Vi4cGFh2Opc51rTWcdoU3fffXdERKFG08osv73r2MYbbxwfffRRi/nma73lw5P8uXby5Mlx0003xYQJE2LLLbeMiJU7R65s7eym69mWjjhHr8p5aE2dwzviMyz1c45Ia02fdtppcd9997VoSKG5FZ0bVuYcU4qVPU+WUsb8PxaaniOal3FlP8OV+T68umUrRSnHR6nfd0v5vVjqb8r8PlfKub75P5yalqkrv0sB3UMmA8BSbbHFFhERceeddxYNv//++yOisZp8796944gjjoi77747Jk2aFHvvvXeLmghN5W8Fae0/RG3JfxH+xz/+ERtvvHFsvPHGMWjQoDj44IPjmGOOaXdZdXV1kSRJTJo0KSZMmBA/+9nP4gtf+EJssskm8eSTTxamybc2esQRR8RGG20Uhx56aNx1110xatSoQrX61sq/MtuglHKUYosttoiKior485//XDT8wQcfLOn9K7LDDjvEjBkzYs6cOYXtvWjRothtt93immuuialTp8bs2bPj1FNPjdNPPz0mTJgQffv2Ldx+19o65Ldb01tPSvWNb3wjZs+eHeeee25EROyzzz4r9f58LcNHHnmksD4bb7xxXHbZZbH33nvHnDlzCsFB09tpX3jhhdhpp53a/FL82GOPFbVEnN/22267beE/vXfeeWdsuOGGhWX+/e9/j9122y3+/ve/x2uvvRYjR46M3/zmN7HVVlvFySefXJhH85aPI6Kk/bM9q7P/lXIc7rLLLvHAAw8UhX6l3pbV/Nbn/A/hfM2MMWPGxIgRI+Lmm2+O++67r6hlzNY0Pf5LtarnmvbKX8o++J3vfCe22Wab6NGjR+yzzz5x9dVXF1oefe+991brGFqR5tupveN/deRv2bn99tvjoIMOis0226zQMmV++e1dI0aPHh333Xdf0Q+HadOmxRtvvFF0e9WKdNR5OP+PnjPOOCMiim+5K0Up54l88NL0OKqtrY3x48fHd77zncK+1byWTf6fUaNGjYodd9wxIqLwCIWIiDlz5hT1r4pzzz03/vSnP8X3vve9FrVxS1m3Vb2ObbfddvHII48UhYC/+c1vYvz48e3erpy3Kvt5e8fo6rrzzjvjnnvuiT322KNFmF3K8tu7jm2zzTZRXV1daCU8Ij2f3HnnnTF+/PhC6DJhwoQYOnRoXHbZZfHCCy/E4YcfXph+dc6Rza3J74NNrcp5aE2dwzviMyz1c45Iz6fnnXdeVFZWxre//e2Vqo3V1KqeY0q5TpdyLinFdtttFxFRFPBXV1cXPeJlZY/xVfk+vKpla03z/bSU42NVvu+urny58r8hI9J/2jT93pSvE
Zmv8RmR/iO16ffhjjgOAVaoS59AWKJ8K6LnnXdeq6/8Q5jzD2xt2mhBkqQP4M63nNe8FeD8Q85POeWU5Mknn0xuuOGGwgO78606JUnjw2EjIrn55ptXWN5nn3228EDzBx54oPDg3uYPRf7a175WeHDwvHnzkoqKiqSioiI5//zzk8mTJxdavsq3ztaan/70p4V5v/jii4WHx0+aNCl56aWXkksuuaRQ7nzrUd/73veSiEguuuii5Iknnig8TPmQQw5ptfyrsg1KKceECRNatHSbfyD5e++9lyRJY8uQZ555ZvLkk08Wyh4reHh6/uHo+QcuJ0njg3jzD4z+29/+lkSkLZhde+21yZ///OdCq2JvvfVWUltbW2gg5tFHH02efPLJQiuUEVFoRa7pA/STJCk87Pf6669PGhoaWl3HX//614Xl5C1evLiotcYVyT98vGnDNUmSJHvssUcSkbauNnny5OTCCy9MIiI59thjkyRJWwWsqKhIhg4dmtx7773J3XffXWjEoaamps3GFfbYY4/kiSeeSG666aakoqIi2XPPPQsPs//xj3+cRKQtkf3pT39KbrzxxqSioiIZNWpUsnTp0qShoSEZP358UllZmVx//fXJY489Vmgl7brrrmt1/drbP0v5fEvZ/1rbjqUch/l9Z5999kn+/ve/F1oSjyitEZATTjghmTJlSqG1xOYtQOcf5N/0OGhL8+N/RefAfCM+q3KuKbX87e2DDzzwQBKRNij08MMPJ7feemsydOjQokZwmh9DzbW2n7Z3Hmm+ndo7/pOk9f2jlGvML37xi8IDyl9++eXk1ltvLRzb+dYcm59jmzcCkm+MY4899kgee+yx5MEHH2zxEPvWjoOXXnqp6PxcynGwokZA8vINF+UbSWhL/uH2f/3rX4uGt3eeSJKk0BrtNddckzz66KOFffLZZ59N6uvrC5/PVVddlTz99NOFa/lRRx1VWE5+HqeffnpyzTXXFFoSbt4ISPNr8UEHHZQMHTo0SZLi/Sv/IPmKiork5ptvTm666aaiV6nrtirXsSlTphTm+/jjjyfXX399UllZWWg4oCOuc0nSshGQUo7RAw88sOjB+M3lGwEZO3Zs4XvaCSeckOy+++6F7dm0MYamjYC0t/z2rmP5ZQ8dOjS5++67k7///e+F4yD/meXlH+AfUdz6einnyLauw801P9Y76xzdvHGBVTkPJcmaOYd3xGdYyjTNW2jNt7Kb/y7RWiMgKzo3JEn755jWNL/+rOp5spQy1tXVFVq//c1vfpNMmTKl0J9vaKOUz7CpUr8Pt3ctLqVsrWm+n5ZyfLS3j5VyHJZyvW++j+Wvlddcc00yZcqUQmvJTVv4HT16dBIRydVXX53cddddhe/aTafpiOOwvfM00H2tMwFg/kLT2it/4X3kkUeSiLT13qYqKiqSY445JkmSlgFgbW1toZWt/GvixInJ7Nmzi+bRtIWupsFga2pra5Nddtml8IMpf9E+55xziqY75JBDir40vPjii0XrOmLEiBYtRTX35ptvFpqZP/7445MXXnihcOHIL/+Pf/xj0Ze+efPmFVrmyr8OOOCAwg+R5uVflW1QSjl22WWXFl8W8i1U/vvf/y4s86yzzip8cc1fnJt/YW8qf0HOf8ZJkiRTp05NItLWv/ImTZpU2HYRkeyyyy7JHXfcURh/6623Fr7cRaQtcl1//fVFP57Hjh2b7LHHHoX3NN2X3n///VbX8dprr00iInn77beLhh977LFJRHGLja3JBy/Nf3jMnj270Bpt/rg45phjirbTCy+8UPjykZ8m3xLiY489VvRlZ8KECUllZWUyfvz4omOj6fyWLVtW9CMqvy/lW+ZOkiR5/vnnkz333LNomrPPPrvNlm3b2z9L+XxL2f/a2o6lHIe33npr4ViIiOTEE09MIhpbX20uH6AddthhRfvciSee2GL5//nPfwr7Y3uaH/8rOgc2bUF1Zc81pZa/lH3we9/7XtE8Ro8eX
fgBkSQtj6Hmmu+npZxHmm+nJGn/+G9t/yjlGjN//vyibVBRUZFcccUVyd57751UVFQk9fX1Lc6x+QDw7LPPLswz/+MvP5+xY8cmTz31VGF8a8dBvkXFW265JUmS0o6DpuewfAD4t7/9rWj98i2KNm11sTWTJ09OIiJ55JFHioaXcp6YNWtW4Qd8/vWTn/ykMP79998v/JjKb9dTTz216DxSU1OTHH/88cnQoUMLn8kee+xR+HHc1rW46Q/opvvXBRdcsMLvHqWu26pcx5Ikbe2y6T4wZsyYQsvQHXWdyx8r06dPLwxr7xgdOnRoUSjS3MyZM1vdXiNGjEiOPfbY5PXXXy+afvfddy8EgKUsf0XXsSRJkqeffrro2j1s2LDk+uuvb1HO119/PYlIg4Xm2jtHtnX9aK75sd5Z5+gFCxYkEZH8+Mc/TpJk1c5DSbJmzuFJ0jGfYXvT5I/ffIu5SZKGIs2P7+rq6pLODUnS/jmmNc2vP6t6niy1jFVVVUXn0jFjxiR77rlnUcjW3mfYXCnfh0u5FpdStuZa209LOT5WtI+VchyWcr1vvo/NnTu3xfqNGDGiKNx75plnCq3AR6QtFx9++OFF26AjjsP2ztNA97VOBIAdKV/jb8mSJUXDly9fnrz99tsthuctXbo0qaysLPxnqBRz5sxp98tha6qqqoq+jLenrq4umT17dtEPoTlz5iSzZs1a4fsWL16cvP76622uc/Pyr+o2aK8cpVi+fHny7rvvtvrfydX1wQcftFkLI0nSQKa9wLOpBQsWJAsWLFjpcuS/xNXV1a30e5uqra1N3n777RVuqzlz5iTvvvvuCpfV9L+5H3zwQdGX6Obq6uqSd999N1m8eHGb08ydOzd5++23S16/9vbPUqzO/lfKcfjvf/97pcvX0NCwwm2VD3HytXja09rxX6qVPdckSfvlT5L298H6+vrkjTfeSKqqqlodv6rH0Iq0tZ3aO/5X1eLFi5P33nuv3eNwRdeIhoaGZPr06cn8+fNXqyyrex7O/4h78803V6scpZwnFi1alLz55pttHleLFi1K3nnnnRbbdcaMGckNN9yQzJw5szCsvr4+GTVqVIsfpZ2hlHVbletYfh/48MMPV6t8K7uft3eMdrZSlt/edWzu3LltBlClWpVzZGvW1PfB5lb1PLSmzuEd8Rl2xOdcitU5x6zMdbqUc0kpampqVrhdVuUYX9nvw6tatta0tp+WcnyU8n23o1VVVSUzZsxIkiStNdo0AMybPn16u8fI6h6HAK3JJUmSRDdQX18fM2bMiKOPPjomT54cpa52dXV1vPrqq3HHHXfEFVdcEU8++WSMHz++k0u7drENOtdTTz0Vr7/+ehxxxBFxySWXFJ611dV23nnnqK+vLzwzjM41ffr0mD59epx33nmFB1WvTIMc0NFeeeWVmDlzZnz961+P7bffPv7yl790dZHa9M4778Tw4cNj//33j8suuyz69esX1113XZx33nlx4YUXxve+972uLiKwDnOOYVV84QtfiOnTp8err77a1UUBiIiIVWu+bB30yiuvxKc//emIiDjvvPNKft+MGTNip512ioiIk046qVsGX7ZB59prr72iuro6xo4du0qtDJMNf/nLXwoPsL7jjjuEf3S5iy66KG655ZaoqKiIX/ziF11dnBUaNmxYXHjhhXHRRRfFiBEjCsOPPvroteafKsC6yzkGgCzoNjUAq6qqYsqUKTF69OhCU/SlqKuri0ceeSQGDx4c22+/fcnNwWeJbdC5nnnmmaipqYmddtop+vTp09XFKfj3v/8dEY2t09G55s2bF//4xz9i1KhRhVaBoSu9/fbb8frrr8f48eNj8ODBXV2cktTX18cLL7wQCxcujDFjxsTAgQO7ukhAhjjHsDKmT58edXV1scUWW3R1UQAiohsFgAAAAADQHanKBQAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIM
AEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwrjzi/q8sAAAAAAHSSXJJE0tWFAAAAAAA6h1uAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAaKGuLuL99yM++CCioWHV3t+e+vqOmU8p0wAAQHcmAAQACmbMiDjiiIiePSOGDo3YbLOIAQMiTjstorp6xe9taIj45S8jhg9P39+/f8Q3vpGGiE09+mjEwQdHlJdHjBwZcemlxeOrqiJOPTViyJB0PptvHnH++RFLlqzcNM3ddVdELpd2V8bzz0fcccfKvae5//wnXfYxx6zefJqrrY246KLG9X788XQ5Dz3Usctpz8KF6XIvvHDl33vTTel7d9659fHjxkV87nPp3w88kE77xBOlz3/ZsvQ955xT+nsuuCB9z+LFbU/TfNvTtqeeSrfnvfd2dUkAoPsSAAIAEZGGf7vtFjFrVsTtt0fMnBnx7rsRP/95Gizts8+KQ8D//d+IE0+M2GSTiKuvjvja19JwZ5990hAmv4w99khDwZtuivjMZyLOPLM4BPzGNyKuvDJi990jfv3riO22izjvvIjTT1+5aZpLkuJuKZYti9hxx4iXXir9PSta9qrUplyRK66IOPvsxtqUFRUR48en4euatDrrd/31aXfKlIhXXmk5vr6+5XxX5jMsK0u3yaablv6eUvaV5tue9q3M5wYAdCwBIAAQM2em4d+OO0b8+c8RH
/94xO9/H3H33RE77RTxyCNpGLbPPmnNp9b89KdpAPXYYxHHHRdx7bVpLb2XX4547rl0mjPPTLv33BNx6KERN9wQsffeET/+ccTSpWngeM89EQcdFPGHP6Q15u69N2L06LR24fLlpU2ztskHcoMGdex8mwdjY8ZEPPlkGnitC955J91fLrgg7c+HgR2pvDzdJscd17Hz7egwFwCgMwkAAYC4446IwYMjbr454pZbIv7rvyIefjjiH/+I2GqrNKh58MGIN99s/fbLJUvSQO9nP0sDl7zRo9PunDlpYHLPPREHHBAxcGDjNF//elqz8J//TJ/nd+qpESedVDz/7bdPuwsXljZNKZ57Lr299JFHIr70pfQWxZEj0xAxSdLbP/O3pV57bcSuu6Z/52913m679D077BBx662N862tTed77bXpNJtvHvGXv0RUVkaMGpVOc9116TLvuy8NWHO59D2PPto4n+XL01pm48al43O5iH33jXj99XT81VdHXHVV+veuu6Zh6AsvpNM/9VTjfO64o3EeI0dGfP/7jbetLluWjrvttjRIHTIkfZ14YvHtr3PmpNt7m23S+fTvnwZq8+eXtq3bcsstaff//b+IL34x4je/WfFtt+15+ul0fW68MV2PHXZo/JyvvrpxuieeSG9D798/nebOO9Np/vzn4vndc086PpeL+OxnI/72t3R4a9s+Ih02cmTjtj777DTYbsu3vpXeuv7Vr6ZlOeOMdPjbb0d8+cvpsCFD0hqvs2Y1vq+2Nv2Mhgxp3Hd+97viebc3j2uuifjv/464/PJ0mr33Tudz3nnF8/noo+LtV1MTcfLJ6a3+/ftH7LVXut819cQT6bLzt3ZPmdL2NgAA1pAkicTLy8vLy8ure7/23z+SSy+NpK4ukohILr64cdwVV0Tym9+kfx9+eCRnn136fPfcM53fO+9EMmtW+veZZxZP87e/pcPvuqv1edTUpOOHDm17OaVMc+ed6TR33pn2//WvaX9EJKNGRXL66ZEMG5b2339/JEuXRnLOOWn/hAmRXHhh+r4zzmh8z4UXNq7jr39dXJaISCor0zLddVckTzyRboMkieS88xqn2XPPSE49tbF/zpx0mvPPT/v32SeSiy5KP6OISEaMiKShIZLJkyPZZZd02Pe+F8mTT0by6KNp/wMPpPO44Ya0f8yYSC67LJJDD22cZ5JEUlvbuNyKikhOOqlxnk0/p/ywo46K5Mc/jmT8+LT/hBPS8VVVaf+PflT6vlFfn26bMWPS/ltvTedxyy3F040ZE8nuu6d/339/Os3jj7c+zwcfbFyfESPSdfrPfxq3UZJE8uabjdOcdlokX/taY/9vf9vy8znkkEiOPrqxf/bs1rf9vfc2fp6XXhrJAQe0vr83feW3Y0Qko0en+8GMGWm5I9Ll/vCHjfv2okXF++BRR6XH6tixaf+996bjS5nHD35QvOwxYyL54hfT9y1Z0ljGa69Np3nyyfT8kF9Wfr/MHzMvvdS4fSsq0uGXXBLJgQc2LufPf+76c52Xl5eXl1d3fUVXF8DLy8vLy8ura1/Ll6c/zp9+OpLXXkv/fv311qe98cY0AChlvr/5TWMAkSSRvPVWy3AxSdLgICINGprPo6EhksMOS8fffHPryyllmiRpOwA84IDGafLrf/rpaf/SpWn/D36Q9r/3XtqfD6SSpDEUqahIA7V8AFhZmQZj+TI2LUs+YPrFLxqHXXddOuyee9LpR4xIg5m6usZpDjoonWbevLQ/HxLW1KT9TQPAurq0TCNGpOXKz+OYY9JpJk9uDAArKyOprk7HL1mSBkWjR6f977yTTnPqqcX7TEVFY3i3KgHgE08Ub4Pq6rR/l12Kp1uVAHDixMbtnv8M8wHgwQen/W+80XK7Ng8Am+6rl12WDnvssda3fT6Uywe4DQ3psk45pe1tkA8An3mm8T3HHZcOe/TRxuny4eKVVzZuk/znkySRzJyZbrcbbkj7S5lHPgC84orGZd9+e
3GQmCTpfIcNS8ffdls6/vzzG8d/9FE6bP/90/5DDkn7Z8xonCa/zQWAXl5eXl5eXfdyCzAAdHO5XPrsvrq69BWRNpyQt2RJ4/DlyyN69Wp/nr/7XXp74+jRjQ189OzZuLym8svKNxSSV1cXcfzxaWMhhx8eccghLZdTyjTtOeCAxr9HjEi7bTV28vzzaXfUqPQW6YcfTm/b/eQn0/e8807jtJ//fMQGG6R/N1/nvH33bfx7223Tbk1NOv20aentq0uWpM9RvO229FmNEW0/h7Gp115Ly/Ttb0est17j8P32S7vPPNM47Etfilh//fTv3r3TstTUpP1bbJE2dHHppRFz56a3F19/fUSfPo3TrIqbbkq7X/hCettvWVl6W+4TTzTe5ryq8uvY2nb/xz/Sz/mTn2w5fXP//d+Nf++2W9qdPr31afOf32c+E3HJJWmDJn/4Q3pb/IpUVESMHdtY3r/+Nf172bLGfSwvf2v39tun+8Ree6W3TdfXp7cnH354Or6UeeTl98FcLr0NOyJi0qTGdX3iibRl8Fwu3XYRaYMq+fm++GK6PR95JB333HMREyemLYjnHX30ircBAND5ytufBADIsh490hDm8cfTZ3tFRPzpT43PIzv66PQ5ZrffnoZde+654vn97GcRp52WNkjxwAMRAwakwzfcMO0uWFA8fb6/oqJx2LJl6TMFJ02KOPLItKXf5mFOKdOUYqONGv8uK0vL0VbLrv/+d9q9+uriZ8rlzZiRNqASEfGJT6zcsvv0Sbv5Zb/8csQJJxQ/c7HpNmpP/nlvTYOYiIhddmksa96QIcXTrL9+cSB7111pC8tNA86Ixs92ZS1alD4jMaI4iMu74YY0RFtVH/tY68MXL07X+9hji4d/5jOtT9+05eB+/dJuW41/HHJIGrpeeGHEWWelr2HD0nXJPz+yNVttVdz/xhtpd++9W0779ttp97LL0ucy3n13xOTJ6bA99kiD9803L20eeUOHNv7dt2+6ba65Jt2///jHdPjXvpZ233037bYV6FVVpcveffe2lwEAdA0BIAAQn/1s+qP/5JPT7rHHpi3rLlkS8eyzabDx4Ydp4HDXXW3P59JL05Z+99gjbVghXwMuIg0XKipa1qDKB1Gbb552ly5NG0W4++40hLzoouIaiaVOU6qVeV++8ZJrronYf//Wx+drS5ZSU7KtZS9dmgattbXpNv3MZ9LalBddlL5KkQ8Xmweu+dqDw4c3DuvRo+35vPRSxIEHprW8rrsubRTjU5+KmDAhrRG4Ku65J+0ee2zaUEpT554b8atfpY1jNK25uDLK2/iGmw9ZmzcU01ZNxpXZN8rKIn7yk7SRlccfT9fxV79Kaw7W1ra9Ls33k8rKdNizz7acNl+LduDA9Dj86KO0cZ477kiPhaOPbmxwpr155DXfVoccku7fkyentTQnTIjYcst03ODBafell9JlNLfBBunwqqri4atTUxQA6BhuAQYACjV8vvzliG9+M+LJJ9PbC48+Og3+NtwwvaV1990ba5A1d/PNafh3wAER999fHP7l7bVXGlzkW6GNSIPCiMZWfP/nf9Iw45JL0ldrIUwp03SEfI3C5cvTbr621iOPRGy8cePrssvS2lZz5nTMcqdOjZg9O23t+PTT0xCmb9+Iv/89HZ8PGfPly/c3tcUWaTe/ffPuvz/t5m9ZbU++BuJVV6W3dW+/fRqgPftsy9u2S5VvsfbCC9MQsOnryCPTW5fzIWFHyuXSz+mee4pbMM7f8rqy84po3Pbf+U7aSnKPHhH77JPWoDv11HTce++VPt8ddkhD8TlzGvevRYvSIPGaa9Ll7bprelvuRhultWDvuiu9LT1/i25781iRCRPSGnuXXZa27pu/rTiisRXrf/yjcb6DBqW3bh9zTDpul13Smr9NQ7/87cEAQNdRAxAAiIED0+d57b57GvQdfnhaw662Nn2O2dVXp4HS7be3rEEUkQZCxx+f/j1sWMTFFxeP/+//T
mt6nXxyelvhIYekNfceeyyd55VXpreTPvVUxG9/m9YUrK1Na4E1ddJJ6bPx2psmX1NvdeXX9Z57IkaOTLfLHnukZa6oSIPTZ5+N+OlP0/Bqs83SoGV1bb11Ov/bbktrZ663XnqLcz6My4cr+ef2XX55xFe+UjyPior0NtSLL06DqK9+NX223sknp7UJd9qptLJMmJB2r7kmDXvmzIn4wQ/SYc1rF+bNnJkGwRMnRpx9dvG46dPTWmsHHND653TooWmZr7km4qCDSivjyvjBD9KQavfd0yD59dfbf05fa5pv+z33bNwPDj00rZ03aVJaIy7/bMlSfO97aYB24IER3/1uGrL96EfprbVf+1paY2/nndOaoCNHpn+/9FIaGuefgdnePFakrCwN/s89N+0/8MDGcd/6VjqfM85IA+rPfCbixhvT/fLWW9NQNH+MH3xwWhvyzTfTfww0taL9AwDoJF3dComXl5eXl5fX2vOaOTOSCy6IZMKEtNXOiEj23jttKXTRorbfN3ly4/StvW66qXHaa65pHF5REclJJ6WtyiZJJD/84Yrn8+67pU3TWhn/9Kd0/J/+lPY/8khja7hNp6uoSFvKzfefeWbjvN9/P5LZsxtbNY3/vwXdY45pbJl30aKWLaU2f+VbkG26TV9+OR12yy1p/623xv/X3h2zxBGEYQCeVFYJwSJVwFQWFhaCYCMR/4GFlaWdYK/Cdbb+ABsLGwsFsUxjkTKVTUSQQFSIoCiIRVKom+JzOcU7NUZRPp8HBvZml7292eWKl535qp6e5vcMDZVqYSG25+fjmO3t+P5SSjUxERVqS4lquFUVVX6vXn+5rJB7cNDcX0qpGo3r1zc6GpWA68/T083vKZeVnaemYvv791KdnMT27Gwcv7MTn8fHb/72ubnYt7LSfnz6++OYHz9ie3g4+usqwF+/3v4crq83++oqwDMz14+rn/G+vrjuq2Pf6v5sbkbf4mLrsW81Tr29MT7tfufAwM2qx1VVquXl6+cZHLw+XsfHzWq7dRsZKdXR0f3P0WhE/9UK0XXb2op9o6M3921sxJjV5+3uvln9eWkpKgfXx0xONitc3/V8aJqmaZr2NO1NVZXqgdkhAJDY6Wm8bVSvm/aYzs6ioEZXV/v12l6Ses24d++afX/+lPLrVxT7eKopyKXEG3Pv37cvAHJ+XsrxcbxN124sz85K2d2NNxQ7Oh52HRcXMZX148f7rW/4Uq2txVgODzf7Vldj+vuXLzFN/b5ajf3FRRTa+PCh9TT4f7G/H/ers7P1/t+/475++tT+vt51joc6OYn/iNsKfOzuRoGZhz5zAMDjEQACAPBqjI+XsrAQod/nzzGlfGwspuzu7f1/aAcA8BIJAAEAeDV+/oz16a5WyH37Ntaj7Ot7rqsCAHhaAkAAAF6dw8NSvn2LojXd3VG9FwAgKwEgAAAAACT2hEtWAwAAAADPTQAIAAAAAIkJAAEAAAAgMQEgAAAAACQmAAQAAACAxASAAAAAAJCYABAAAAAAEhMAAgAAAEBiAkAAAAAASEwACAAAAACJCQABAAAAIDEBIAAAAAAkJgAEAAAAgMQEgAAAAACQmAAQAAAAABITAAIAAABAYgJAAAAAAEhMAAgAAAAAiQkAAQAAACAxASAAAAAAJCYABAAAAIDEBIAAAAAAkJgAEAAAAAASEwACAAAAQGICQAAAAABITAAIAAAAAIkJAAEAAAAgMQEgAAAAACQmAAQAAACAxASAAAAAAJCYABAAAAAAEhMAAgAAAEBiAkAAAAAASEwACAAAAACJCQABAAAAILG/WTMoiz93V3cAAAAASUVORK5CYII=\"\u003e" }, "metadata": {}, "responses": [], "suggestions": [ { "agent": null, "question_name": "correction", "score": null, "type": null, "value": 
"```json\n\u003chtml\u003e\n\u003cstyle\u003e\nbody {\n font-family: Arial, sans-serif;\n margin: 0;\n padding: 0;\n border-box;\n}\n\nheader {\n background: #ff0;\n text-align: center;\n padding: 20px;\n}\n\n#video-container {\n width: 70%;\n margin: auto;\n text-align: center;\n}\n\n#video-player {\n width: 100%;\n height: 300px;\n background: #f0f;\n}\n\n#chef-tips {\n margin: 40px;\n}\n\n#chef-tips p {\n color: #000;\n line-height: 1.6;\n}\n\nfooter {\n background: #ff0;\n text-align: center;\n padding: 20px;\n position: fixed;\n width: 100%;\n bottom: 0;\n}\n\u003c/style\u003e\n\u003cbody\u003e\n \u003cheader\u003e\n \u003ch1\u003eInter\u0027national\u003c/h1\u003e\n \u003c/header\u003e\n \u003cmain\u003e\n \u003csection id=\"video-container\"\u003e\n \u003cvideo id=\"video-player\" src=\"#\" controls\u003e\u003c/video\u003e\n \u003c/section\u003e\n \u003csection id=\"chef-tips\"\u003e\n \u003ch2\u003eChef\u0027s Tips\u003c/h2\u003e\n \u003cp\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\u003c/p\u003e\n \u003c/section\u003e\n \u003c/main\u003e\n \u003cfooter\u003e\n \u003cp\u003e\u00a9 2022 Inter\u0027national. 
All rights reserved.\u003c/p\u003e\n \u003c/footer\u003e\n \u003c/body\u003e\n\u003c/html\u003e\n```" } ], "vectors": {} } ``` While the same record in HuggingFace `datasets` looks as follows: ```json { "accuracy": [], "accuracy-suggestion": null, "accuracy-suggestion-metadata": { "agent": null, "score": null, "type": null }, "correction": [], "correction-suggestion": "```json\n\u003chtml\u003e\n\u003cstyle\u003e\nbody {\n font-family: Arial, sans-serif;\n margin: 0;\n padding: 0;\n border-box;\n}\n\nheader {\n background: #ff0;\n text-align: center;\n padding: 20px;\n}\n\n#video-container {\n width: 70%;\n margin: auto;\n text-align: center;\n}\n\n#video-player {\n width: 100%;\n height: 300px;\n background: #f0f;\n}\n\n#chef-tips {\n margin: 40px;\n}\n\n#chef-tips p {\n color: #000;\n line-height: 1.6;\n}\n\nfooter {\n background: #ff0;\n text-align: center;\n padding: 20px;\n position: fixed;\n width: 100%;\n bottom: 0;\n}\n\u003c/style\u003e\n\u003cbody\u003e\n \u003cheader\u003e\n \u003ch1\u003eInter\u0027national\u003c/h1\u003e\n \u003c/header\u003e\n \u003cmain\u003e\n \u003csection id=\"video-container\"\u003e\n \u003cvideo id=\"video-player\" src=\"#\" controls\u003e\u003c/video\u003e\n \u003c/section\u003e\n \u003csection id=\"chef-tips\"\u003e\n \u003ch2\u003eChef\u0027s Tips\u003c/h2\u003e\n \u003cp\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\u003c/p\u003e\n \u003c/section\u003e\n \u003c/main\u003e\n \u003cfooter\u003e\n \u003cp\u003e\u00a9 2022 Inter\u0027national. 
All rights reserved.\u003c/p\u003e\n \u003c/footer\u003e\n \u003c/body\u003e\n\u003c/html\u003e\n```", "correction-suggestion-metadata": { "agent": null, "score": null, "type": null }, "external_id": null, "html_code": "```json\n\u003chtml\u003e\n\u003cstyle\u003e\nbody {\n font-family: Arial, sans-serif;\n margin: 0;\n padding: 0;\n border-box;\n}\n\nheader {\n background: #ff0;\n text-align: center;\n padding: 20px;\n}\n\n#video-container {\n width: 70%;\n margin: auto;\n text-align: center;\n}\n\n#video-player {\n width: 100%;\n height: 300px;\n background: #f0f;\n}\n\n#chef-tips {\n margin: 40px;\n}\n\n#chef-tips p {\n color: #000;\n line-height: 1.6;\n}\n\nfooter {\n background: #ff0;\n text-align: center;\n padding: 20px;\n position: fixed;\n width: 100%;\n bottom: 0;\n}\n\u003c/style\u003e\n\u003cbody\u003e\n \u003cheader\u003e\n \u003ch1\u003eInter\u0027national\u003c/h1\u003e\n \u003c/header\u003e\n \u003cmain\u003e\n \u003csection id=\"video-container\"\u003e\n \u003cvideo id=\"video-player\" src=\"#\" controls\u003e\u003c/video\u003e\n \u003c/section\u003e\n \u003csection id=\"chef-tips\"\u003e\n \u003ch2\u003eChef\u0027s Tips\u003c/h2\u003e\n \u003cp\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\u003c/p\u003e\n \u003c/section\u003e\n \u003c/main\u003e\n \u003cfooter\u003e\n \u003cp\u003e\u00a9 2022 Inter\u0027national. 
All rights reserved.\u003c/p\u003e\n \u003c/footer\u003e\n \u003c/body\u003e\n\u003c/html\u003e\n```", "image": "\u003cimg src=\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAABQAAAALQCAYAAADPfd1WAAB/xElEQVR4nOzdeZxWdd0//vc1DCDgIIsOLlgJBoqJicKN4ZalpoXeZlpf03JJU2/3cskyl8rd8q4009JSbzUx09xSzCUjd1NTFPcCFZBlYAaGZWbO74/zu66ZaxbmAmYYOPN8Ph7X48xZrnM+51xnua7XfM755JIkkgAAAAAAMqmsqwsAAAAAAHQeASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGlXd1AQCA7uPVVyM++qh42NixEf36dU15utqiRRHPPls8rF+/dJtkxVtvRcyY0dify0VMmBBR3s2+hT73XERNTWN/RUXEDjt0XXkAgO4llySRdHUhAIDu4QtfiHjwweJhTz4ZMX78mln+lCkRf/xjxE9/umaW156nnorYaafiYZWVEbNmdU15VkVDQ8TNN0fU1kZ8+9stxx9zTMR11xUPmzEjYrPN1kz51habb14chA4bFvH2211XHgCge3ELMACQedOnR3z96xE779wygGTVPfVUGt5+85sR06Z1dWkAAGhLN7v5AgDobi69NOLMM7u6FNlSXR1x4okRv/99V5cEAIBSqAEIAGTaj3/c1SXInunThX8AAOsSNQABALrIkCEth33842u+HJ3p4osjzjijsT+Xi9h0064rDwBAdyQABADWKfPnRzz/fNqK6qc+1bktCH/4YdpycZ8+EaNGRQwcuOrzWr48bfG3tjZixIi0UYhPfCJdj+rqxulGj165+dbWRvzznxFLl0Zsu23Ehhuu3Hs//DBi5syIefPSbTloUMQmm6SNkXSEQYPS18qaPz/iP/+J+OCDiA02iNh669Xb/nn19RGvvZbWYtx66zRwzeVKf//s2en2mjkzndfAgelr2LCInj1Xv3wAAJ3BLcAAwFpl+PA0kMm/LrssIknS7jbbpGHSnnumjU+sv37EuHFpANbckCHp+5uGaxERU6c2znvnnVu+b9GiiFNPTd+/6abpsnbeOV3u5pun4xYtar3sCxcWlz2Xi7j77oirr47o1StiwoSIz38+4mMfS1tErq9v2QLyNtsU9x93XPH8vvSldPh996Xl6ts3ne8ee0RstFFaxttuW/E2njYt4vjj0/cOH56+f+LEdB6f/nS67jvs0HI+r7+elqF5GSMifvazxjKec07j8FNPbblN2mrlOEnSVpq/8IV0e3/60xH77puWL7/9v/WtiI8+anvd/u//ipfVv39EXV26zvvtl4Z1226bzneLLdJw8fvfTwPattTVRfzudxHbbZdum+22i9h773QeO+0UsdVWEYMHp2X78MO25wMA0FUEgADAWqWmprh/3ryIb3wjvY106tSW0z/7bMSYMRGTJhUPr61tf1nz5xf3//Of6byuvDKt6
dXcjBnpuDFjIl56qeX4+vqWw37724j/+Z+Wwz/6KKK8PA3amtp66+L+5mHjwoURv/hFGgROmdJ6Gf/f/4s47bSW4yLS2pNbbRXxq1+1Pj7vhRfS+Zx6auOwZctW/J7WytxaWFpX13LY/PkRBx4Y8ZWvtN1S84wZ6fYcPjzizjtbn2bp0uL+6uqIxx6LGDs24p57WgbC1dURF14Yseuure8z9fURRx4ZccQRES+/3Poy8/P57W8jRo6MeOuttqcDAOgKAkAAYK128cURN9/c/nQnnFB6QNWaWbMidtst4o032p/2jTfS2mkzZrQ/7T33tD78iCPS7imnRDz9dONr111XPL8nnog46aT2l/uzn7UMTBcujPjsZ9t/b1NXXpmGgZ1p2bK0Rt2f/lTa9NXVaVj461+XNv2ee7YM/pp76qnW97PLL4+46abSlpMv23e/W/r0AABrggAQAFhnfO1rEeeem9bAa2727OKaY5tv3vZz7Cor01fTBjfOOadlSFRREXH22WkNsdaezXfJJSu/Dnlf/nLaHTIkvY05/+rbt/R57LFHxPnnR3zxi62Pv/764v6bbmq5jpWV6e3Av/xlxJlnpuvc3OTJabdPn3T61qbJz6uyMr0ddmWcd15ak7O5iop03XbZpfX3HXts+pzAUo0enX6eRx7Z+vhf/rK4f8mSiJ/8pOV0e+8dcdFFET/9aRouNnf33asXRgMAdDSNgAAAa72KirThhs02S/vPPDN9dl7zWzKnTUufZReRNt4RkT4DrmnoNWpU47i8f/4z4rrriocNG5beYrvxxmn/aadFHHRQcY2+X/4y4jvfSRvzWJHKyojf/Ca93ffhhyPefXf1W8J94IH0WXl5xx0Xcc01xdM0rwH49NMt5/O3v6W3reZttVVj7cS8999Pu5/8ZFpTcurUls8BPPXUNBBbWXPmpGFac4cfHnHVVY2B6LRpaWjafJ2OPTbi/vvbX87ZZxeHeXvumd7i3NTLL0c0NESUlTX29+lTvP8ceWR6q2/eySenjaY0v2V83rzGfQcAoKupAQgArPVuvrkx/ItIQ5nTT2853cKFqzb/Rx5pOezCC4sDnN69I37+85bT3Xpr+/P/3/9Ng8lNN02fZ3juuatWzrzTTisO/yLSULS55s84vPHGtLGL119PG9t47LHi8C8iYvfdW86nqmo1CtuO1hosGTs2Ddma1oYcOTKtWdfcAw+0/rzG5vM7//ziYV/7WsTQoS2nXbKk8e9x49LAc+7c9Nbrm25KbwluqqwsYv/9W86nvVuOAQDWJDUAAYC1Xmu3WQ4b1nJY8wYgStXac/+GD09rpzW1/vppbcSm4c706e3Pf7/9Vq1cbWntlt+PfazlsNZuQy0vT8O0fPC3YEH6jL/nnot4/PG0deHmmoZiHe2xx1oOO//8xlp4TW25ZcRRRxXXwItIa4e2dbt3RLq9ylv51rv11i2f47h0acvbsAcNSltc3nnntIbgtGnp9nrmmYh77414552W8y6lERoAgDVFAAgArNUqK9Maf821NmxVvfZay2Fjx5b23vYaAhk6dOWe61eKzTdvOay1wKwt06alNe/uvHPFLdvm5XKlz3tlvflmy2Hbbtv29Ntv33LYK6+kDbi0pbVwNKL0z6W2Ng1Gb7st4qGHSqvd16NHafMGAFgTBIAAwFpt/fVbH96rV8ctY9q0VX/vv/+94vFbbLHq825LW+Fn89qJrbnhhrYbwWhLa7XnOkprtQuHDGl7+taeq9deQyBtba/evVf8voiIjz5KaxC21kjJiqxMIAsA0Nl8NQEA1mrrrdf68I6slbY6jTXU1Kx4fEfX/osoLbhqzdVXtx3+7blnxAUXtH5LbmeGWYMGtRy2omf6zZzZclh7IWtb+1B76zV/ftrYTGvh39ChEUcfnQaqxx238vMGAFiT1AAEALq9kSNb3go7a9aKnytXqrbCp9WxquHn//5vy2HXXRdxyCGNQWV9fcctrxSbbx7x1FPFw
159tbjRl6Zau2W5eYvEza1q+R9+uOXz/XbfPW2deNSoxmEffNDyvQJAAGBt4qsJANCt1NW1HNZagyJ/+1vLYe+9F/Hd76YB0F//GvH++xFJsuLldeStyqvjP/9p2djJMcdEfOtbxbUU3323/Xm1Fm4tX75q5Ro3ruWwc89tfbu++27Etde2HL711qu27PY8/HDLYTffXBz+RbT+HEMAgLWJABAAyLTmz3/78MPG1oLzreR+6Ust33fqqenz35r67ncjrrgi4oQTIj7/+fQ20BNOWPHy15bGIFp7VmHzmncNDREnndRyuuatK7f2TMCmrSG31vpwW7761dbLdcIJxc8HfOediK98peW0++8fseGGpS9vZbz1VsthL7xQ3P+Pf0T87nctp1uZbQAA0NkEgABApg0YUNxfXR0xenTETjs11hzbeeeIXXYpnm7GjIhPfSoN/G64IeLLX4744x9bzv9//qdTit3hRo5sOezllyOOOiri0Ucjfvvb9PbWBx5oOd38+cX9/fq1nObuu9PtOHJk+6FoU5tvHnHYYS2HX311xMc/HnHwwRFf+ELE8OEtw7eKirQ2ZmcZPbrlsOOPT2shPvpoxGmnpWVrTVVV55ULAGBleQYgAJBpn/50y1tfm/bX16e19C6+OGLChOLpZs9Oa/215fjjW94OuraqrIwYM6ZliHb99elrRV56Ka0dmL/1d6ONWm9xeMqUtLuyLR//8pfpe5s/b2/27IhJk9p+3//+b9vPCuwIe+0VceWVxcNmzIj49rfbf++0aS33JwCArqIGIACQaV//+orH52/z/cxn0qBr6NDS5nvYYa03qrE2u+mm9qepqGg9CG16u3B5edoCbltKeY5gU/37Rzz+eNoScSkqKiL+9KeII45YueWsrH32iTj88Pan23//lsNuu63DiwMAsMoEgADAGtPas+Pae0ZeW63otva+1oZNnJjeTtqaESOKnzM3enTEiy+moU9rDYNEpLXobrstvS24+fq0tvyePVufT6lKXc+Ils87bD7dqFFpC7t77NH6+7/2tfS5d60Fm3/5S3H/j34Uceyxrc+naYha6mc+dGi6jJtuSm9Fbk1FRfrZvP12xH//d+vTtLa81oZFtP7ZNC/btddG/Oxnrb9/6NCI22+PuOuudD9ravLkiJqaxv7mjcG0VSYAgM6QS5Jop+06AIB1X21txOuvp7eZfvzjaRjWtPXb1lRVRUydmjYcstlm6fPqNt00IpdbI0XuVB98kG6L2bPTIHSrrVYtlJo/P91Gs2en8/jkJzsm3Jo1K211+YMP0hqC22wTsfHGqz/fVVVbm26vd95J95vttuu8xkcAADqaABAAAAAAMswtwAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGlUckXV0G6DI77rhjVxcBAACANeC5557r6iJAl1EDEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAA
ACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMPKu7oAAACsvoaGhsjlcrF8+fJYunRpNDQ0RE1NTdTW1kZdXV1ERNTV1UV9fX306NEjysvLo2/fvtHQ0FAYVlFREWVlZdGvX78oLy+PsjL/KwYAyAIBIADAOiwf6i1cuDBqamoKAeDy5csjSZI237N06dJYvHhxYZqysrKYP39+lJeXR5IksfHGG8eGG24oBAQAyAABIADAOmjRokVRX18fNTU1UVVVFXV1dbF8+fKiacrL0696ZWVlkcvlokePHpHL5SJJkqivr48kSaKhoSHKyspi2bJlERGF7qJFi2LgwIECQACADBAAAgCsQ5YuXRrV1dVRVVUV1dXVkcvlor6+vjC+X79+kcvlory8PNZff/1Yf/31o6GhIXr06BF9+vSJ5cuXR0NDQyRJEkuXLo0ePXpEWVlZVFVVxfLly6OqqirWX3/9GDBgQPTs2bML1xQAgI4iAAQAWMslSRK5XC4++OCDWLx4cSxcuLAwvGfPntGzZ8/o1atXDB48ONZbb73o0aNH9OzZM5IkiR49ehTNq1evXoW/+/TpU/i7X79+UVdXF5tssknU1tbGgAED1si6AQDQ+QSAAABrsSRJYuHChVFVVRVz5swpDO/du3f06dMnBgwYEOuvv35h2OooLy+P8vLy1Z4PAABrFwEgAMBaqKGhIRoaGmL27Nkxb968WLp0aURE5HK5GDBgQAwePDgqKioiIjynDwCAFRIAAgCsZRoaGqK2tjZmzpwZixYtKjTuscEGG0SfPn1i4MCBsd566wn+AAAoiQAQAGAtk3/WX3V1dURE9OzZM/r37x+VlZXRt2/fLi4dAADrGgEgAMBaZObMmbFgwYJYsmRJRESst956seGGG8bgwYNbNOgBAAClEAACAKwl5syZE/PmzSuEfxtttFEMGDAg+vfv38UlAwBgXSYABABYC8yePTs++OCDaGhoiIiIgQMHxoYbbhh9+vTp4pIBALCuEwACAHSx+fPnx/Tp0wv9+ef9rbfeepHL5bqwZAAAZIEAEACgC9XW1sYHH3xQ6K+oqIgtt9wykiTRy
i8AAB1CAAgA0EXq6uriww8/jPr6+ohIG/zYYostIpfLqfkHAECH8W9lAIAukCRJVFVVxdKlS2P58uVRVlYWW2yxRfTs2bOriwYAQMYIAAEAusD8+fNj/vz5sXjx4oiI2GSTTaJXr15dXCoAALJIAAgAsIYtXLgwqqqqYuHChRERMXjw4BgwYECUl3s6CwAAHU8ACACwBjU0NERtbW3Mnz8/ItJGPyoqKmK99dbr4pIBAJBVAkAAgDWorq4uZs6cGRERvXr1ioEDB8bAgQO7uFQAAGSZABAAYA1JkiQWLFhQaOG3vr4++vfvH2VlvpIBANB5fNsEAFhDli5dGlVVVbF8+fKISBv+0OovAACdTQAIALCGLFmypNDwR0VFRfTt21ftPwAAOp2m5gAA1oClS5fG4sWLC4Ff3759o6KiootLBQBAd+BfzgAAa8CyZcti/vz50dDQED179oxNNtmkq4sEAEA3IQAEAFgDqqurY8mSJRER0adPn6ivr+/iEgEA0F0IAAEAOtny5ctj2bJlhf4+ffpEr169urBEAAB0JwJAAIBOliRJofGPXr16RZ8+fbq4RAAAdCcCQACATrZo0aLI5XIREVFfXx99+/bt4hIBANCdCAABADpZjx49IkmSiIgYMGBA9OjRo4tLBABAdyIABADoZAsXLoyGhoaIiOjZs2eUl5d3cYkAAOhOBIAAAJ1s6dKlUV9fH7169YoBAwZ0dXEAAOhmBIAAAJ0sf8tvQ0NDoSYgAACsKQJAAIBOVlZWFrlcLsrKyqK+vr6riwMAQDcjAAQA6GT19fWFRkB69+7dxaUBAKC7EQACAHSyurq6iIhYtmyZW4ABAFjjBIAAAJ1s8eLFERHRt2/f6NmzZxeXBgCA7kYACADQyfKNgESEABAAgDVOAAgA0MnKy8sjIn0WYG1tbReXBgCA7kYACADQyXr16hUREUmSxPLly7u4NAAAdDcCQACATpbL5SIibQxkyZIlGgIBAGCNEgACAHSyvn37Ro8ePaKhoSGWL18eSZJ0dZEAAOhGBIAAAJ1s/fXXL9QCXLJkSeFvAABYEwSAAACdrKGhIcrK0q9d9fX1ngMIAMAaJQAEAOhkvXv3jt69e0dE2hBIfX19F5cIAIDuRAAIANDJevbsGT179oyIiOXLl8eSJUu6uEQAAHQnAkAAgE6Wy+Wib9++UV5eHsuWLYuqqiohIAAAa4wAEABgDRgwYED06dMnIiKqq6tj2bJlWgMGAGCNEAACAKwBPXv2jAEDBkQul4v6+vpYsGCB1oABAFgjBIAAAGtAWVlZ9O7dO3r16hVJkkRtbW3U1dV1dbEAAOgGBIAAAGvIeuutF2Vl6devJUuWRHV1dReXCACA7kAACACwhvTs2TP69+8fEWlrwLNmzVILEACATicABABYQ8rKymLAgAHRr1+/iEhrAc6ZMyeWL1/exSUDACDLBIAAAGtQnz59YuDAgVFeXh719fUxd+7cqKmp0SIwAACdRgAIALAG9ejRIwYOHBh9+vSJiLQW4IIFC9wKDABApxEAAgCsYT179oyNN944+vbtGxERc+fOjY8++kgtQAAAOoUAEABgDcvlctG/f//YaKONory8PCIiqqqqoqampotLBgBAFgkAAQC6SP/+/WODDTaIsrKyWLp0acyePVsICABAhxMAAgB0kV69esWgQYOiV69e0dDQEFVVVTFz5sxYunRpVxdtleVbNl60aJFbmgEA1hICQACALrT++uvHZpttVmgUZMGCBTFt2rRYsmRJF5ds5dXX18cHH3wQH374YcydOzfq6+u7ukgAAIQAEACgS5WVlcWAAQNis802i4qKisjlclFfXx+zZs2K2trari5eyRYvXhwffPBBVFVVxfLly6OmpmadrskIAJAl5V1dAAAA0pqA9fX1UVdXF7W1tTF//vyIiBg8eHD07t07evbs2cUlbF2SJFFTUxOzZ8+OqqqqiIioqKiIDTfcMPr169e1hQMAICIEg
AAAa4UePXrEoEGDonfv3jFjxoxYtGhRzJkzJ6qqqmKjjTaKDTfcMHr16tXVxSxIkiTq6urio48+iqqqqkJtvz59+sSQIUNi/fXX7+ISAgCQJwAEAFiL9OvXLzbddNOYNWtWVFdXR11dXcydOzd69OgR/fr1i379+kUul+vqYkZtbW3MmTMnPvroo4hIb2Veb731YtNNN40NNtigi0sHAEBTAkAAgLVMv379YqONNor6+vqoqamJZcuWxYwZM6J3796x0UYbxfrrr99lt9fW1dXFvHnzYsGCBbF48eKIiCgvL48NNtggBg0apOYfAMBaSAAIALCWKSsriw022CA22GCDmDt3bsyePTuWLFkSy5Yti9mzZ8eCBQti0KBB0a9fvygvL4/y8vJOqxWYJEnkcrmoq6uL6urqmDVrVixZsqSohd9NNtkkBgwYsFbdogwAQCMBIADAWmyDDTaIhoaGqK6ujgULFsSyZcti2bJlsXjx4sItwfmGQjpaPnSsra2NRYsWRU1NTSxfvjwi0mcWrrfeerHJJpu45RcAYC0nAAQAWIuVl5fHRhttFIMHD465c+dGTU1NVFdXx/Lly2PhwoWxcOHC+PDDD2PAgAGFWoM9e/aMurq6yOVy0aNHj3aXka/llyRJLFmyJHr16hULFiyIqqqqqK6ujoaGhkiSJJIkiYj0FuWKiooYMGBA9O3bt7M3AQAAq0kACACwDigrKyvc9tuvX79YtGhRLFq0KOrq6qK+vj6qqqqiqqoqIiLWX3/96NOnT/Tr1y+SJImePXtGWVlZVFRURE1NTZSVlUWvXr1i6dKlsWTJkoiIqK6ujvr6+qirq4uampro1atXLFu2rKgMvXr1igEDBsTAgQOjT58+JYWLAAB0PQEgAMA6okePHtG3b9/o27dvLF26NGpqaqKmpiYWLlxYFNblh3/00UdRXl4evXv3jkWLFkWPHj2id+/e0bt371i4cGGUl5cXAsR8DcC8ZcuWRXl5efTp0yc22GCDyOVy0bt376ioqIiysrKuWH0AAFaRABAAYB2UD/IGDx4cS5cujQULFhSeEZhvtKPpKyKivr4+Fi9eXGi9t2lDHvnwr2fPntGzZ88oLy+P/v37x4ABAwo1CAEAWDcJAAEA1nG9e/eOysrKqKysjPr6+kKDHUmSFBrtyN/Wmw8Hc7lc9OrVK+rq6qKhoSH69+8fZWVlhVp+y5Yt83w/AICMEAACAGRIjx49on///tG/f//Vmk95ua+JAABZ4V4OAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAKRbS5JEv379+vXr169fv379+vV3g37oznKJI4JubIcddujqIgAAALAGPP/8811dBOgyagDS7eVyOV1dXV1dXV1dXV1dXd1u0IXuSg1AujU1AAEAALoHNQDpztQABAAAAIAMEwACAAAAQIYJAOnWuroVKv369evXr1+/fv369evXv2b6oTvzDEC6tTFjxnR1EQAAAFgDXnjhha4uAnQZNQABAAAAIMMEgAAAAACQYQJAAAAAAMiw8q4uAHQlj8AEAAAAsk4NQAAAAADIMDUA6dbUAAQAAACyTg1AAAAAAMgwASAAAAAAZJgAkG4tfwuwrq6urq6urq6urq6ubra70J3lEkcC3dh2223X1UUAAABgDXjppZe6ugjQZdQABAAAAIAM0wow3ZoKsAAAAEDWqQEIAAAAABmmBiDdmhqAAAAAQNapAQgAAAAAGaYGIN2aGoAAAABA1qkBCAAAAAAZpgYg3ZoagAAAAEDWqQEIAAAAABmmBiDdmhqAAAAAQNYJAOnWmgaAuVxOv379+
vXr169fv379+vVntB+6s1zS9IiAbmbrrbfu6iIAAACwBrz22mtdXQToMmoA0q3JvwEAAICsEwDSrQkAAQAAgKzTCjAAAAAAZJgagHRragACAAAAWacGIAAAAABkmBqAdGtqAAIAAABZJwCkWxMAAgAAAFknAKRbEwACAAAAWecZgAAAAACQYWoA0q2pAQgAAABknQCQbk0ACAAAAGSdAJBuTQAIAAAAZJ1nAAIAAABAhqkBSLemBiAAAACQdQJAujUBIAAAAJB1AkC6NQEgAAAAkHWeAQgAAAAAGaYGIN2aGoAAAABA1gkA6dYEgAAAAEDWCQDp1gSAAAAAQNZ5BiDdWvMAUL9+/fr169evX79+/d2zf+DAgZEkSQwcODD23HPPGD58+FpVviz39+jRI7beeuvYaqutokePHp22POjOcokjgm5sk0026dD5DRkyJCIiZs2a1aHzBQAAOs+gQYNixx13jIceeigGDRoUw4YNi0GDBsW8efPiueee6+ritTB8+PAYOHBgof+dd96JefPmRUQUyp83f/78ePvtt9d4GVfG1ltvHZtvvnlEREyfPj1ee+21TlnOhx9+2CnzhXWBW4Dp1jo6/06SJLbbbrv48MMP44033oilS5d26Pxb86lPfSp+/vOfx4gRI2LZsmXx1FNPxWGHHRb19fWtTv/Vr341Tj/99Nh0001jwYIF8fvf/z4uvvjiwvif/OQn8eUvfzkqKirio48+itNPPz0efvjhTl8PAADoCoMGDYoddtghkiSJJEli7ty5MXfu3Bg+fHgMHz489txzz3jooYe6uphFBgwYEAMHDiyEfvmyN/970KBBRf1rq7bKD3QcASDdWmcEgEmSxMYbbxwbbbRRvPHGGzF9+vQOXUZT/fr1i7vuuit69OgRF198cWyyySZx5JFHxj333BP77LNPi+l33HHHuPLKK2P69Onxwx/+MPbaa6845ZRTYtmyZXHFFVfEd7/73TjqqKPi0Ucfjb/+9a9x3HHHxU033RRf+MIX4sUXX+y09QAAgK7QNPyLKP598NZbb0WSJDF8+PAYNmzYWleLbt68efHss8+2GJ4PMCMixo4dGxFr/62w06ZNK5TxjTfeWOvLC+sizwCkW2t+oV/dblP551iMGTMm1l9//U5Z3qGHHhrrr79+nHbaafGLX/wivve978U///nPGDNmTAwYMCCSJIk777wz7rnnnoiIOPvssyMiYpdddonrrrsuvvKVr0RtbW1861vfioiI4447Lj766KP46le/Gtddd11MnDgxcrlcfPe73+2U8uvq6urq6urq6uquye7AgQMLgd7w4cNjxx13jKaSJCm6hfatt96K+fPnx/Dhw9eK8me1W1dXF6+99lpMnTo16uvrO2050J2pAUi3liRJh15Ums4vb/DgwbHTTjvFW2+9FW+//XaHLu9Tn/pURET86U9/ioaGhohI/3s2ZsyYGD16dDz22GMxevTo6NGjRzQ0NMTHPvaxmDt3bixevLhQvo8++iiGDh0aDQ0N0a9fv/j73/9eWI/p06dHkiQxcuTIwvy7+suBrq6urq6urq6u7qp2Bw4cWPR8vPy4pv0NDQ0xfPjwmDdvXsybNy/efPPNGDt2bAwYMCDmzZu3VqzHW2+9Vfi7I6br6m6PHj1i5MiREZH+nmloaOi05UF3JQCENWTLLbcs3Bacf1ZHR8yzoaGh6Hl/r776akREbLbZZhERse2220ZZWVrZd8MNN2zx4Nv//Oc/8bGPfSw+8YlPRC6Xa9GASU1NTaEGIwAAdBf5EHBtVGq51tbyNzdy5MhCIyAREVOnTu3C0kA2CQDp1jr6v0BN/7O2ovEdtdz11luvMN+85cuXR0TE+++/H0mSRE1NTWFceXl59OjRo2j6+vr6aGhoiL59+xamaT6+urraf8wAAFjnrcz39Xxtv/xr4MCBhWfr0bGa11D02wM6nmcA0q2tyWrlb731Vjz55JMdetvAnDlzoqysLAYOH
FgYPmTIkIiI+Mc//tFi+sWLF8fgwYOLhg8aNCiqqqrinXfeiYiILbbYomh8375948033+zU7aWrq6urq6urq6u7JrttaT4+SdLvyxGx1tz+G5E+yzDfwm9HTNfV3ddffz2mT58e//nPf4oaBFlTnzt0B2oA0q0lSec/A3DOnDkxbdq0qK6u7rDl5LvPPvts7LrrrvG5z30ubr/99oiI+NznPhe1tbVRW1vbYvp///vfse2220ZFRUUsXLgwIiJGjBgRr776aixatCiWLVsWW265ZWE9Ro4cGb169Yrnn3/eMwB1dXV1dXV1dXXX+e7cuXML/RHpI3WaSpL0GYBvvvlm4fl5+efRzZkzp8vLn+/mGyWZM2dOh0zX1d2GhoZ45ZVXoqnOWh50V2oAQiepq6uLqVOnxnPPPVcI/zraVVddFUmSxOWXXx6f+9zn4jvf+U5st9128eyzzxamueeee+LBBx+MiIirr746crlc3H///fFf//Vfccstt0Tv3r3jjjvuiIiIyZMnR2VlZfz85z+P//qv/4pJkyZFkqQtCQMAwLpu3rx58dZbbxVezzzzTJvTRKQB4ZZbblnop3P06NEjRo0aFaNGjYoePXp0dXEgk3KJGJxuLH/rbEfZeOON49Of/nR88MEHMW3atFi6dGmHzr81X/3qV+NXv/pV5HK5iIj48MMPY/vtty8s+7333ouePXsWGgW59tpr46CDDiq8/6GHHoqvfvWrERHRs2fPeOqppwotoyVJEt///vfjV7/6VaevBwAAdIVBgwbFuHHjIiLiL3/5S2F4PvxrPnxtMG7cuBg0aFChkY+33nqr8PegQYMK5c5P01rQuTYZNWpUfOxjH4uItJHCzmoEZP78+Z0yX1gXCADp1gYMGFAIziLSwGt1+ocMGRK5XC5mzpzZIfMrtb93796x1157xdSpU+Ptt99ud/rBgwfH7rvvHo888kjRRTA/fsstt4xRo0bF/fffH3V1dZ1efv369evXr1+/fv36u7I/HwI+8MADkcvlCgHb3Llzi+6uWVvKu+WWWxae7ZfL5QoBYJIkMXjw4EIAmCRJzJ8/v1CDcW0pf/P+fACYJElMnz69EAB29PIEgHRnAkC6tQEDBnR1EQAAgLVAvrZcPgxcF2rOZUWPHj1iq622ioi0QZD6+vpOWU5VVVWnzBfWBQJAurUNNtigq4sAAADAGrBgwYKuLgJ0Ga0A063JvwEAAICs0wowAAAAAGSYGoB0a2oAAgAAAFmnBiDdWj4A1NXV1dXV1dXV1dXV1c12F7ozjYDQra2//vpdXQQAAADWgJqamq4uAnQZNQDp1rr6P1C6urq6urq6urq6urq6a6YL3ZkagHRr/fr16+oiAAAAsAYsWrSoq4sAXUYjIHRr8m8AAAAg6wSAdGsCQAAAACDrPAMQAAAAADJMDUC6NTUAAQAAgKxTAxAAAAAAMkwNQLo1NQABAACArFMDEAAAAAAyTA1AujU1AAEAAICsEwDCGiJsBAAAKJbL5bq6CNAtCABhNWy++eax1VZbRa9evbq6KAAAAJm0bNmyeP3112P69OldXRRYZ3kGIN1ae7Xy2hsv/AMAAOhcvXr1iq222mq1f79Bd6YGIN2eiwgAAMC6we83WDVqAMJqeO2112LZsmVdXQwAAIDMWrZsWbz22mtdXQxYp+US8TjdWHm5SrAAAADdQV1dXVcXAbqMGoAAAAAAkGECQAAAAADIMAEg3Vr+DnhdXV1dXV1dXV1dXV3dbHehO/MMQLq1Hj16dHURAAAAWAPq6+u7ugjQZdQABAAAAIAMEwACAAAAQIYJAOnW2ns2hPHGG2+88cYbb7zxxhtvvPHZGA/dmWcA0q2VlcnAAQAAuoOGhoauLgJ0GekHAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhpV3dQGgK2kEGwAAAMg6NQDp9nK5nK6urq6urq6urq6urm436EJ3lUtUgaIbcxEAA
ADoHsQfdGdqANLtdfV/oHR1dXV1dXV1dXV1dXXXTBe6KzUA6dZcBAAAALoH8QfdmRqAAAAAAJBhWgGmW/MfIAAAACDr1AAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDyru6AKuipqYmnn766XjllVfixRdfjAULFsTWW28dW2+9deyxxx6x6aabtnhPVVVV/N///V+h//Of/3yMHDmy08s6e/bsuPfee+PZZ5+NDz/8MLbaaqsYMWJE9O3bN+bOnVuY7tvf/naUl3fsx/H444/HK6+8stLv22+//aJ///5x8803F4Z97nOfi6222qojiwcAAADAGpBLkiTp6kKsjMcffzwOPfTQmDFjRpvT/PznP4/jjjuuKFB7/fXXY+utty7033TTTXHooYd2aln//e9/xx577BHvvPNOi3E77rhjPPfcc4X+mpqa6NevX4cu//jjj49f/epXK/2+Bx98MLbYYosYMWJEYdjvf//7+MY3vtGRxQMAAABgDVinbgH+/ve/H7vvvvsKw7+IiJNOOim+8pWvRENDwxoqWetuvvnmVsO/XXbZJXK5XBeUCAAAAIDuZp25Bfjhhx+OCy+8sGjYqFGjYrfddovZs2fHH//4x6Jxd999d1x77bVx7LHHrsliFnnzzTeL+h955JHYYostor6+vkXtw84IBEePHh0TJ04sGvbqq68WhZITJkyIQYMGFU2z0UYbRe/evYtqAG6wwQYdXj4AAAAAOt86cQvw8uXL41Of+lS88cYbhWE//vGP46yzzooePXpERER1dXUcf/zxRc+tGzZsWLz55ptRVlbWJbcAf/azn43HHnssItKw8tVXXy2MO/zww+P3v/99RERUVFTEwoULO7UseT/60Y/ihz/8YaH/+eefjzFjxqyRZQMAAACw5q0TtwDfeuutReHfxIkT4/vf/34h/ItIQ7QrrrgiKioqCv3bbrttUUMbzf3jH/+Ir3/96zFkyJDo379/fOlLX4o//OEPbU4/derUOOqoo2KbbbaJXC4X22yzTRxxxBExZcqUoumuu+662G+//eL5558vDJs+fXrst99+sd9++8W0adNihx12KIwbP3580ftnzpwZZ555ZowbNy769+8fuVwuhgwZEjvvvHP86le/ikWLFrWzxVbfhx9+WCjvfvvtF48++mhh3B133FEYftppp8XSpUvj8ssvj8997nORy+Viu+22i5NOOqnVbZ8kSdx7772x7777xvDhwyOXy0Uul4vhw4fHd7/73fjnP//Z6esGAAAA0J2sE7cANw+Fzj777Fanq6ysjEceeSR69+4do0aNKgoIm/v5z38ezz77bNGw++67L+67776YNm1aUS25iIjrr78+jjrqqKJhU6dOjalTp8bvfve7uOCCC+IHP/hB5HK5+Ne//hX33HNP0bTV1dWFYWeddVaMGTOmEFY2DQBfeeWV2HbbbVuUd/bs2TF79uyYMmVKPP300/G73/2uzXXrCDU1NUXr8JWvfKXw95tvvlkYN3To0Jg6dWo8+OCDhfEvv/xyvPzyy/GHP/whHnroodhuu+0iIg3/vvnNb8ZNN93UYnnvvPNOX
HHFFXHFFVfEq6++GqNGjeqsVQMAAADoVtaJGoDTpk0r6m8tIMvbcccdY9ttt11h+BcRhfCvoqKiEMTlnXvuuTFnzpxC/8svv9wi/Gvuhz/8Yfz5z39e4TRNTZgwIRYuXBgLFy6MCy64oDD8yCOPLJpuzJgxsc8++xSV8fe//32LZx52lRkzZhSFf03Nnj07vvGNb0R9fX1ERPzlL38pCv8qKipin332aRH2HXbYYbFs2bLOKzQAAABAN7JOBID/+te/Cn9XVFREv379OmS+1157bcyfPz/mzZvX4nmATz31VOHvk08+uWjc7bffHsuWLYsXXnihKLw6++yzo76+Ps4///x49913i56tN3r06Hj33Xfj3XffjbFjx7ZanpqamqJaiVdffXU8//zzcf/998fMmTMLjXKMGTOm6PbirlZRUREPPvhgNDQ0xH/+85+i9Xv55Zfjtttui4iIv/71r0Xvqaqqivvvvz9effXVuP766yMircW5ySabFD0vEQAAAIBVt04EgDNmzCj83adPnw6Z58SJE+Poo4+OHj16RHl5eZxwwglF4+fNmxcREVVVVYWGPCIi9t9//zjooIOiZ8+esf3228cpp5xSGDd16tR49dVXY+DAgfGJT3yiKKisqKiIT3ziE/GJT3wievbs2WqZmtdaPP744+Nb3/pWTJo0KRYsWBBPP/10LF26NJ5//vkWLSJ3pQsvvDD22muvyOVysfnmmxcaN8l7/PHHI6K4JeHq6uoYO3ZsXHbZZfH000/H17/+9Zg9e3bMmjUr7r333th+++3X6DoAAAAAZNU68QzAESNGFBoBmT17dtTX17d7i2978s+lyxs0aFBR/9KlSyMi4t133y0aPmfOnDjrrLMK/R988EHR+OnTp8fo0aNXqUx9+vSJPfbYIx555JHCsN/+9rfx29/+NiLSmn8HHHBAHH300TFkyJBVWkZn+MIXvlDUv/XWWxd9Zq+//npEpK0iN/XCCy/ECy+8EBFpQDpx4sQ47LDDWswPAAAAgFW3TtQA3GabbYr6Z82a1ea0jz32WNx8881Fz/Brzcc+9rGi/rZq5b333ntF/VOmTIlLLrmk8GreoMW///3vFS63PTfddFMMGzas1XEvvPBCnHPOObHxxhvHXXfdtVrL6UiDBw9uMWzo0KGFv/Mh6s477xwXX3xxq/Oorq6OW265JfbZZ5/Ya6+9ora2tnMKCwAAANDNrBMB4NZbb13Uv6IGMH7wgx/EYYcdFhtttFF87nOfi/fff7/V6ZrfSpzL5VqdrnkDIcOGDYuJEye2+Ro4cGApq9SmTTfdNKZOnRq33npr7L333m1Od8ABB0RVVdVqLaujzJ07t8WwpgFs0zDwzDPPjBdeeCFOP/30ouFNTZ48OS677LKOLygAAABAN7RO3AK87777Fj3z7sc//nEceuihLcK2++67L6ZMmVLof+WVV1b7VtmPf/zjRf2f/exn4ze/+U2hf/HixfH+++/HFltsEeXlq7856+rq4oMPPoiNNtoo7r777kiSJJ599tmYPHly/PrXv47Zs2cXpn3llVdi5513Xu1lrq7HHnssttxyy0J/VVVVvPzyy4X+kSNHFv5esGBBJEkShx12WFx66aXxzjvvxJQpU+IPf/hD3HfffYXp8s8NBAAAAGD1rBM1ACdMmBCHHXZYoX/27NkxevToeOihh2LBggVRVVUVv/nNb+L//b//V/S+U089dbVDuU984hNRWVlZ6L/99ttj/vz5hf6TTjopRowYET179oztttsuXnrppVVe1g033BA9e/aMYcOGxec///n4+c9/Huutt17ssssuccEFF8T5559fNH2vXr1WeVkd6fvf/3688847ERHR0NDQopw77LBD1NfXx/Dhw2PAgAGxww47xIQJE6KqqiqGDRsWhx12WNx9991FtS2XL1++RtcBAAAAIKvWiRqAEREXX3xx0fP2ZsyYscJbZEeMGBHf/va3V3u5PXv2jHPOOSdOP
PHEiEifVTd69Og49NBD47XXXou77767MO2SJUtaPK9wZTRfnzPOOCOeffbZGD9+fEydOjVuv/32ovGr2thIR5s9e3Z8+tOfjj322CPefffdotp/lZWVceSRR0aPHj3iS1/6Uvz85z+PiHQ7/td//VcceOCBsd5668VDDz0U1dXVhfdNnDhxja8HAAAAQBatMwHgpptuGq+88koccsghRQFTayorK+Ohhx5a7efx5X3rW9+KJ598Mm655ZaISMPH1hqz+P3vf79aNQ433XTTuPHGG+Mb3/hGYdikSZNi0qRJLaa9++67Y7311lvlZXW06urqojA071e/+lX069cvIiLOO++8ePLJJ+PZZ5+NiIg33ngjLrroohbvGTt2bPzP//xP5xYYAAAAoJtYJ24Bzttmm23imWeeiXPPPTdGjBjRYnxFRUX85Cc/iTfeeKPFs/uaN/LRo0ePov6ysrI2+9dbb724+eab42c/+1mrLfROnDgxXnrppRg/fnyby2g+/7Ycdthh8eijj7ZZu3H33XePKVOmxH777VfS/JprXo62Gj9pPnxF5Z86dWrsvvvuRcOGDRsWTz75ZHz5y18uDBs4cGA89NBDcd5557VoXCUi/fx+/OMfx8MPPxx9+/Ztb1UAAAAAKEEuSZKkqwuxqhYtWhSvvfZa1NfXF57V11ag1ZEWLlwYr732Wqy33nrxsY99rMNqGja3YMGC+OCDD2Lu3Lmx4YYbxuabb16oTdeVLrroojj77LML/QsXLoyKioqYN29evP766zFs2LDYeOONVziPurq6mDlzZsyYMSPKyspi6NChsfHGG5cclAIAAABQmnU6AKRrtBUAAgAAALD2Ud0KAAAAADJMAAgAAAAAGbbOtALM2mPLLbeML37xi4X+nj17dmFpAAAAAFgRzwAEAAAAgAxzCzAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEAAAAAAyTAAIAAAAABkmAAQAAACADBMAAgAAAECGCQABAAAAIMMEgAAAAACQYQJAAAAAAMgwASAAAAAAZJgAEACgG/voo49ixowZUVdX19VFAQCgk6wTAeAOO+wQuVyuzddxxx1X8rzOP//8yOVyUVtb22nlTZIkJk2aFP/85z8jImLJkiWRy+Xi3HPP7fBl1dbWxkUXXRRLlizpsHk2L//a5vLLL49cLhfz589vdfwFF1wQuVwuFi9e3OllGTduXHzuc58r9D///PNxxx13FPp33XXX2HnnnTu9HB3h8ccfj1wuFw899FBXF2WF1uTnW6qnnnoqcrlc3Hvvva2Or62tjVwuF+eff36HLrf58f/oo49GLpeLhx9+uMOX0xnlb03zY6i5VdlPO+M8uTqan2OXLVsWuVwuzjnnnDVelqbnsAceeCByuVw88cQTa7wca4s1dR5s7zrWno46Dy5evDh23nnnqKysjM033zz+/ve/r9b8IiJmzZrV6ne14cOHx/HHHx/vvvtu0fSf/exnY6eddlrt5a6Nmh/rnXWOXrhwYeRyubjwwgtXuWwdpb1z+Jry4x//OHK5XNTU1KzS+I6yNlx/9t1339hhhx26bPkr0l7ZOmM/7ajjcE3tQwAdaZ0IACMiKioq4rzzzmv1tc8++5Q8nyFDhsSYMWOirKzzVv2JJ56Igw8+OD766KOi4Q0NDR2+rCuuuCLOPvvsqK+v77B5tlX+tUV+OyZJ0ur4/PC2xnek+vr6QnmWLVsWO+64Y7z00kuF8Q0NDR362XSmioqKGD9+fPTv37+ri7JCa/LzXVnt7ZMdfQ5ofvx31nI6a77NtXYMNbcq+2lnnCdXR/NzbFlZWYwfPz423XTTNV6WpuewvLXx2FpT1
tR5sL3rWHs66jz4wAMPxJQpU+LII4+Me+65J3bcccfVml/TMo0dOzbOO++8OPfcc+P444+P4cOHx69+9asYNmxY/Pvf/y5Mv/XWW8c222yz2stdGzU/1temc3RnfNcr5Ry+prR3jG2yySYxfvz4Tv09ELF2XH8aGhrW2tq97ZWtM/bTjjoO19Q+BNCRyru6AKXaaKONOqQG3bHHHhvHHntsB5SobZ39I7mzl7Umy8/aY8yYMfHkk092dTFYCd3xWF2V/XRt207Ny1NeXu7YW0t0t/Pg7NmzIyLi9NNPj6222qpD573zzju3+N52zTXXxHHHHRdHHnlkTJ48OcrKyuLqq6/u0OWuTda2c09Ta3PZ1oSjjjoqjjrqqE5fTnffzqtrbd5+a2ofAuhImfqXxXPPPRfjxo2LRx55JL70pS9FLpeLkSNHxi9/+cvCf3uuvfbaGDduXCxdurTwvjvuuCPGjRtXmP773/9+oar+aaedFrvuumssW7asaFnHHXdcfPWrX21Rhqeeeiq+/e1vF6Y588wzC+OqqqriW9/6VvTv3z8233zzOPnkk2P58uWF8TU1NXHyySfH8OHDo3///rHXXnvFCy+80Ob6Xn311XHVVVdFRHqr6R/+8IeIiPjzn/8cn/vc56J///6Ry+Vihx12iPvuu6/wvtra2jjxxBNjyJAhkcvlYty4cfG73/2uzfKv7DbIa68cxx13XFxwwQVx2WWXxciRI6N///6x3377xXvvvVc0n+uvvz523nnn6N+/f3z5y1+O999/v81lNnXPPfcUbh//7Gc/G3/729+Kxr/99tvx5S9/Ofr37x9DhgyJb3zjGzFr1qzC+OXLl8cVV1xR2DdyuVzsu+++8frrr7dYVv42qoh0H9t1110L4+rr6+PSSy8tfK5f/vKX44MPPoiIiJtvvjnGjRsXL7/8ctH8rrvuuhg3blzMnTu3xbKuueaa+O///u+4/PLLo3///rHvvvtGTU1NNDQ0xC9/+cvYbrvtCtv71ltvLXpvdXV1fPe7342RI0fGkCFD4tRTT41XX301IiJeeOGFGDduXDz11FMRkX4+Z599dpx11lkxZMiQGD58eJx11lmxaNGionk+/PDDseuuuxaOn3PPPbdoX3n33Xdjv/32i/79+xf266effrqNT23F+2dT7X2+7e1/bW3HUo7DJ554Ir785S9HLpeLnXfeOaZMmdLm+jQ1c+bMOPjgg6N///4xcuTIuPLKKyNJkqirq4tdd901TjzxxKLp8/vV5Zdf3mJebR3/ERGvvfZafOELX4hcLhfbbbddXHPNNUXvXdlzTXvlzytlH7zqqqti5MiRhf3l7LPPjqVLl67wGGqqtf10ReeRtrZTe8d/a/vH3/72t3avMRER//rXv+Lggw+OzTffPHK5XGy++eZx+eWXR319favn2Lq6uhg3blxREPLPf/4z9t1330L5DjvssMJ5IyI9R3zpS1+K++67L3baaafCsfLoo48Wba/2joO2vPLKK60ee02vs809/fTTMW7cuLjxxhtjyJAhscMOOxRu22rvPBER8b//+7+Fc/3Xv/71+Mtf/lIYt2TJkrjwwgsL+84OO+wQN954Y9H7lyxZEueee25ss8020b9///j2t78dl1xySey7774RkdZOGjduXNx2221xzDHHxJAhQ2LIkCFx4oknFm6lbbp/TZs2LcaNG9fq69RTTy0st5R1W5Xr2GuvvRZHHHFEDBkyJLbZZpv4yU9+EnPmzCmaZnWuc9///vfjvPPOi4iIr371q4V1ausY7QjHHntsHHnkkfHII4/EtGnTIiI9Dpr+iG5v+Su6jkWk56n859v8mrN48eLYaaedCuud99FHHxUdg+2dI9u6fjS1ou+DnXWObmplz0MRa+YcHrF6n+HKTNPUm2++GTvvvHPst99+sWjRovjtb38b48aNi8WLF5d0boho/xzTXGvXn1U9T5ZaxqVLl8Y555xT2LZnnHFGi+N3ZY/xU
r4Pl/KdvpSyNdXWflrK8dHePhax4uOwlN+UTfeh/Gf0/e9/v7DM0047LS666KL4+te/HhERixYtinHjxsX1119fVI5TTz21ME3E6h2HAO1K1gFjxoxJhg0blixbtqzVV95f//rXJCKSiEhGjRqVnH766cmwYcOSiEjuv//+JEmS5Ic//GESEcmiRYuSJEmSG264IYmIZMyYMclll12WHHrooUlEJPvss0+SJEly3XXXFb0/SZLkww8/TCIiOeecc1qU9c0330wOO+ywJCKSQw45JLn11luT2traQrmGDRuWnHHGGcmYMWOSiEh+8IMfJEmSJHV1dcnYsWMLy77ooosKZX/ppZda3S6TJ09OdtlllyQiku9973vJk08+mTzyyCNJRCSjR49Ozj///OToo49OKioqkohIZsyYkSRJkpxxxhlJRCRHHXVUcvHFFxeWe++997Za/pXdBkmSlFSOCRMmFLbLYYcdVljumDFjCvOZNGlSEhHJHnvskVx66aWF7RYRydy5c1td9nnnnVeY5pBDDkmOPvroQv/s2bOTJEmSGTNmFMpz9NFHF/aLoUOHFvaN888/v+jz2H///ZOISEaMGJE0NDQU9s3dd989Wbp0aXLOOeckEZFMmDAhufDCC4vWsaKiIjnxxBOTiRMnJhGRjB8/PkmSJHnttdeSiEjOOuusonUYNWpUYZrmfvCDHxTWZ/To0YXtlf9cR40alVx44YXJnnvumURE8utf/zpJkiRZvnx58sUvfjGJiGT33XdPzjrrrKSioiIZMWJEsnz58uTRRx9NIiJ54IEHWpT9yiuvTM4888zCZ5V3zz33JBGRVFZWJmeffXZhW3/ta18rTDNmzJikoqIiOeOMM5If/vCHSWVlZRIRyUcffdTq+q1o/yz18y1l/2ttO5ZyHL755ptJRUVFMmzYsOSSSy5JDjzwwMJ8/vznP7e6TosWLSo6N1122WXJ3nvvnUREYV855JBDkohI5s2bV3jfnXfemUREMnny5BbzbO34b3oO3GWXXZLvfOc7he394IMPJkmyaueaUspfyj547733JhGR7Lnnnsmll16aHHDAAUlEJGeeeWabx1Bzbe2nbZ1HWttOpRz/re0fpVxj5s+fXzgmvvOd7yRnnXVWYZpJkya1eo5dunRpoXxJkiSvvvpq4di74IILCsdeZWVlYf9oehzsueeeyamnnlronzNnTsnHQf4cliRJcv/99ycRkTz++OPJ8uXLk8rKymTs2LFF2/+UU05JIiKZP39+i8/mwQcfLJRhxIgRSUVFRTJz5sySzhMXX3xx4Rr5wx/+MBk1alQSEclbb72VJEmSHH744UlEJAcccEBy+eWXFz73Sy+9tDCPE044IYmIZOzYscmPfvSjwr5fWVmZJElSdC2uqKhITjrppMK+ceaZZ7bYv2bMmJGcfvrphdcZZ5yRjBgxIomI5Dvf+U6SJKWdA1flOvbhhx8mQ4cOTSIi+eY3v5kcc8wxhf21+ee/qte522+/vXAcn3LKKclNN920wmO0VPnvB6eeemqr4/PntbvvvjtJkiTZZZddCvtZe8tv7zq2aNGiwmd0yimnJBdeeGFhO06ZMiVJkiT54he/mFRUVCRLliwplOnaa69NIiJ58sknSzpHtnUdbqq1Y72zztFVVVVJRCQ/+tGPkiRZtfNQkqyZc3hHfIalTJP/Drdw4cLkP//5TzJ06NCkoqIiefXVV1uML+XckCTtn2Oaa+36s6rnyVLLeOKJJyYR6Xeon/zkJ4VzwOjRo9v9DNtSyvfhUr7Tt1e25lrbT0s5Ptrbx0o5Dku53jfdh5ruHyeddFLyi1/8ojDPUaNGJUnS8jjN23vvvQvTJMnqHYcA7VlnAsD8Sbi114svvpgkSePJ+oADDii8Nx+wnH766UmSFAeAdXV1hQtCbW1t4T35L9qTJ09O5s6dm0REcvjhhxfGX3311UlEJK+88kqr5c3/gMhfRPIX7crKykLokf/Bl//hd
dtttyURkZx//vmF+Xz00UdJRCT7779/m9smf/GpqalJkiRJjj/++CSi8cdd0/LmA5QxY8YUXWxnzpyZ7LLLLskNN9zQavlXZRuUUo78l4UXXnihME3zEKSioiIZPXp0snz58iRJ0i99+S957QWAF198cWHYZZddlkRE8thjjyVJkiTHHXdcEhHJo48+Wpgmf0G98sork4aGhmTEiBHJ6NGjk7q6usI0Bx10UFH5mv54zn+m+VC36Trmv5AmSZLss88+SUQUfnyMHTs2qaysTOrr65MkSZKXX345iYjkmmuuaXX98j88rrjiiiRJkqShoSF57733ivanJGn8EVFRUZHU1tYmd911VxIRyS9+8YvCNHfffXcSEcmNN97YZrCS3w+SpHF/e+aZZ5KGhobCl6GqqqrCNPmw4vnnn0/mzZuXRERy2mmnFcZPnjw52XPPPZOnn3661fVrb/8s5fMtZf9rbTuWchzm99Gm8z744IOTiNICwPy+U1dXl4wfPz6pqKhIFi1aVAhf8uuZJOn+VllZWdj/m2t+/OfPgfvss09hv33qqaeSiEguuOCCJElW7VxTSvlL2QfzX2rzAVVDQ0Ny8MEHJ6ecckqSJK0fQ821tZ+u6DzSfDu1d/wnSev7RynXmFtvvTWJiOSuu+4qTJM/ps8444yidcgfW80DwPyX+TfeeKMwj1tuuSWJiOSHP/xhkiSNx0HT4zn/z5p77rknSZLSrwetBYBJkiRnnXVWEhHJm2++mSRJ+qOqoqIiOfjgg1v9bPI/bCdOnFjYZqWcJ/L730EHHVTYb99///0kIpIjjzwyefHFFwt/5y1fvrwQAMybNy/517/+lUSkYVnz7d48AKysrEyqq6uTJEmSJUuWJEOHDi2cc5rvX009+eSTSUQabixZsqSkdUuSVbuO5QPdpvv1aaedVvg8OuI6lyRJctVVVyURkfznP/9JkiRp9xgtRXsBYH475sPbpgFge8tv7zr2y1/+MomI5Pe//31hfP7cNGHChCRJkuT2228vOgbyZRg2bFjJ14HWzg+taX6sd9Y5unmwsCrnoTV1Du+Iz7CUafLn/TfeeKMQtL388suF6VsLAFd0bijlHNOa5tefVT1PllLG/PWo6fGaL2N+mpU9xkv9PtzetbiUsrWm+X5ayvHR3j5WynFYyvW+6T70xhtvtDjv5f+ZtzIBYEcchwArss48A7CioqLN1n432mijov4DDjig8PeIESMiIq0K3txrr70W1dXVcd5558V6661XGL7ffvvFtddeG88880x8/vOfj4MOOih+97vfxVVXXRV9+/aNG2+8MUaPHr3SD63ea6+9YsMNN4yIiF69esUXv/jFwoOS//GPf0RExKabblrUKtWIESNavdWqLVdddVVceeWVEZHe7vDGG2/Ec889FxFRqKK+/fbbx29/+9vYa6+94uCDDy7c2taWQYMGrfQ2KKUcERGVlZWx/fbbF/q33377uOWWW6KmpiaWLFkS1dXVccQRR0R5ebqr9u3bN4488si44IIL2t0W//3f/134e7fddouIiOnTp0dExF//+teISKvrN28F7KmnnoqTTz45pk2bFsuXL48lS5bE22+/HVOnTo2ZM2dGRHqb6sCBA9stQ0S6737mM58p9H/+85+PBx54ID744IPYYost4ogjjojjjz8+nnrqqfjMZz4TkyZNioiIAw88cIXzzd9yksvl4vnnn4+IiFGjRhWtzyc/+cl49tln45133incZvzNb36zMH7ixInx4YcfxsYbbxyPPfZYq8vZZZddCn8fcsghce6558bLL78cH/vYx+Kdd96JCRMmxLPPPluYJr9dnnvuuRgzZkwMHTo0fvrTn8a8efNiv/32i89//vMrbGGz1P1zRZ9vqftfRPF2LOU4fO6552LixImx2WabFcYfffTRcfvtt7e5Tnl77713Yfv06NEjjjzyyDjmmGPirbfeis9//vNRUVERt912Wxx++OFRVVUVkyZNi
tNPP72w/5fqwAMPjB49ekREFFrXyz90f3XONSsq/1tvvRURK94Ht91224iI+MxnPhNHHnlk7LvvvkW3Lq+qFZ1HWjtOSzn+85ruH3krusZ87Wtfi6985StRVlYW06dPj7feeiueeeaZiGi577UmSZJ4+OGHY+LEifHJT36yMPwLX/hCRESLVlqb3nqW3775WxFX5jhozSGHHBIXX3xx/PGPf4wzzzwzHnvssaiuro7DDjtshe/bb7/9IiLdZrNmzWr3PJHfdkceeWRhv910001j1qxZMXjw4LjuuusiIoqe31teXh4HHHBAXHLJJfHqq68WbjPLLzu/PYYOHdridtwvfelLsf7660dERO/evWPbbbct3I7alunTp8f+++8flZWV8cc//jF69+5d0rptsskmq3Qd++c//xljx44t2q9/9KMfxTnnnBMDBgwoDFvd61xznXWMNpXfP1t7cH57y2/vOpZ/LMkhhxxSGP/xj388xowZE1OmTInly5fHF7/4xYiImDRpUnzxi1+M6dOnxxNPPBE/+tGPSr4O5LV2fihFZ52j81blPFTK94iO2D864jPMfydY0TR5e+yxR8yYMSP+/Oc/F8rflhWdG1588cWIKO0cU4qVPU+OGjWq3TK+8sorERFx6KGHFpVxwoQJhfPsyn6GuVyu5O/DK7oWl1K2UpRyfLS3j+WnW9FxmFfqb8p//etfEZEee3mjRo2K3XffvfCs1VKsqeMQ6L7WmQBwo402iksuuaTkafPKysqioqKi1da38s/BafpDPqIx8JgxY0ZERBx22GExadKkeOihh2L06NHx1FNPFX5UrYzNN9+8qL+ioqLwpeHdd9+NiDRIaE1NTU3hgr8i8+fPj5NPPjluuummwrDKysqiaS677LKYM2dO3H333TF58uSISL8g/e53v2tRxryV3QallCMi/cLWVN++fSMiff7F22+/HRFpy81NtVXG5pq2qNmvX7/CfCMi3njjjYhIA43m8st9+eWX44QTTognnniiMK6ioqKkZTfV/MHq+c8xX5YDDzwwjj/++Ljjjjti/PjxccMNN8T+++9fCIvbMnTo0MLf+S8sV199dasPVJ8xY0ZMnTq1xTrkcrnYeOON21zG7rvvHn369Cn05z/DN998s/Ajc8qUKbHnnnu2eG++TA888EB8/etfj9/97neF54kde+yx8dOf/rRo3nml7p8r+nxL3f8iirdje8dhVVVVvPHGG7H77ru3OY8Vyf9Az9tkk00iIuL999+P0aNHxzHHHBNXXHFFzJo1qxCSNv0yWaqm57Ty8vKic+DqnGtWVP5S9sFDDjkkXnvttbjwwgvjrLPOirPOOiuGDRsWN9xwwwqfF9WeFZ1HWlPK8Z/X2me7omtMQ0NDXHjhhXH55ZcXfiQMGzas1FWJ2traqK6ubrHcgQMHxtixY1s8I7VpWfLHU74sK3MctGbbbbeN0aNHx4033hhnnnlm3HrrrVFRURF77bXXCt/3sY99rPB3qeeJiGjRCnK+rB9++GGr43fbbbe45JJL4v333y98puPGjSuaZq+99op77723aFjza8r666+/wh/wNTU18aUvfSlmz54dzz//fOH9pazbql7Hnn/++RbbuW/fvoV9O291r3PNddYx2lT+M//EJz6x0stv7zr2/vvvx7Bhw1r80yT/jLDZs2fHZpttFscee2xcc801cfXVV8cf//jHiGg815Zyjswr9dzfXGedo/NW5Ty0ps7hHfEZljJN03JHRPz617+OiRMnrrBsKzo3rMw5phSrep4spYzNK0Z8/OMfLwRwq/IZlvp9eEXX4lLKVopSjo9Sv++u6DjMK/U35ZtvvhkR0eK7+yc+8YmVCgC78rsU0D1kqhGQvFKbY8+f1BcsWFA0vLa2NiIihg8fHhHpxb2ioiL++Mc/xl133RUREQcddNBKlyv/X6bWDB48OCIiXnrppfjwww9bvPJf7Nvz7W9/O2666aY45ZRTCrXMfv/73
xdNM3DgwLjrrrti9uzZcdNNN8X+++8fjzzySJsX04iV3wallCNixdsk/8Om+edT6n8KV7QfVFZWxtChQ1vd1g888EAsXbo09txzz3jxxRfj0ksvjb///e+xcOHCOOGEE0padlMrWsd8Wfbff//4v//7v3j22WdjxowZ7dawiYiiL775/75ec801ra7TbrvtFoMGDYqIiLq6uqL5PPXUU4Uf2O2ZP39+RERsscUWhVoo3/zmN1td5llnnRUREZ/61KfipZdeitdeey2uvPLKGDNmTFxzzTXx85//vNVllLp/rujzLXX/iyjeju0dhxtssEFUVlZGVVVV0TyaP/y9VPntmf9Smq/NcO+998Yf/vCHGDFiRIwZM2al57uibdNR55rm5S9lHywrK4uf/OQnsWjRorj//vvjuOOOi3feeSd22223QsNLq6K9Y6y59o7/plqrfbmi7XvNNdfEueeeG7vuumvcfvvtMWPGjELNkVJaM+zbt29UVFQUtm1TCxYsiK233rrksqzMcdCWI488MqZOnRovv/xyXH/99XH44YdHr169VvieptuslPNEft9pfq6fOnVqTJs2rRAEtnWt3mKLLQrTNL8+tHa9WJn9paGhIb7xjW/Eyy+/HLfeemvR8VjKuq3qdWyjjTZqsQ8sWrQonn766Vi4cGFh2Opc51rTWcdoU3fffXdERKFG08osv73r2MYbbxwfffRRi/nma73lw5P8uXby5Mlx0003xYQJE2LLLbeMiJU7R65s7eym69mWjjhHr8p5aE2dwzviMyz1c45Ia02fdtppcd9997VoSKG5FZ0bVuYcU4qVPU+WUsb8PxaaniOal3FlP8OV+T68umUrRSnHR6nfd0v5vVjqb8r8PlfKub75P5yalqkrv0sB3UMmA8BSbbHFFhERceeddxYNv//++yOisZp8796944gjjoi77747Jk2aFHvvvXeLmghN5W8Fae0/RG3JfxH+xz/+ERtvvHFsvPHGMWjQoDj44IPjmGOOaXdZdXV1kSRJTJo0KSZMmBA/+9nP4gtf+EJssskm8eSTTxamybc2esQRR8RGG20Uhx56aNx1110xatSoQrX61sq/MtuglHKUYosttoiKior485//XDT8wQcfLOn9K7LDDjvEjBkzYs6cOYXtvWjRothtt93immuuialTp8bs2bPj1FNPjdNPPz0mTJgQffv2Ldx+19o65Ldb01tPSvWNb3wjZs+eHeeee25EROyzzz4r9f58LcNHHnmksD4bb7xxXHbZZbH33nvHnDlzCsFB09tpX3jhhdhpp53a/FL82GOPFbVEnN/22267beE/vXfeeWdsuOGGhWX+/e9/j9122y3+/ve/x2uvvRYjR46M3/zmN7HVVlvFySefXJhH85aPI6Kk/bM9q7P/lXIc7rLLLvHAAw8UhX6l3pbV/Nbn/A/hfM2MMWPGxIgRI+Lmm2+O++67r6hlzNY0Pf5LtarnmvbKX8o++J3vfCe22Wab6NGjR+yzzz5x9dVXF1oefe+991brGFqR5tupveN/deRv2bn99tvjoIMOis0226zQMmV++e1dI0aPHh333Xdf0Q+HadOmxRtvvFF0e9WKdNR5OP+PnjPOOCMiim+5K0Up54l88NL0OKqtrY3x48fHd77zncK+1byWTf6fUaNGjYodd9wxIqLwCIWIiDlz5hT1r4pzzz03/vSnP8X3vve9FrVxS1m3Vb2ObbfddvHII48UhYC/+c1vYvz48e3erpy3Kvt5e8fo6rrzzjvjnnvuiT322KNFmF3K8tu7jm2zzTZRXV1daCU8Ij2f3HnnnTF+/PhC6DJhwoQYOnRoXHbZZfHCCy/E4YcfXph+dc6Rza3J74NNrcp5aE2dwzviMyz1c45Iz6fnnXdeVFZWxre//e2Vqo3V1KqeY0q5TpdyLinFdtttFxFRFPBXV1cXPeJlZY/xVfk+vKpla03z/bSU42NVvu+urny58r8hI9J/2jT93pSvE
Zmv8RmR/iO16ffhjjgOAVaoS59AWKJ8K6LnnXdeq6/8Q5jzD2xt2mhBkqQP4M63nNe8FeD8Q85POeWU5Mknn0xuuOGGwgO78606JUnjw2EjIrn55ptXWN5nn3228EDzBx54oPDg3uYPRf7a175WeHDwvHnzkoqKiqSioiI5//zzk8mTJxdavsq3ztaan/70p4V5v/jii4WHx0+aNCl56aWXkksuuaRQ7nzrUd/73veSiEguuuii5Iknnig8TPmQQw5ptfyrsg1KKceECRNatHSbfyD5e++9lyRJY8uQZ555ZvLkk08Wyh4reHh6/uHo+QcuJ0njg3jzD4z+29/+lkSkLZhde+21yZ///OdCq2JvvfVWUltbW2gg5tFHH02efPLJQiuUEVFoRa7pA/STJCk87Pf6669PGhoaWl3HX//614Xl5C1evLiotcYVyT98vGnDNUmSJHvssUcSkbauNnny5OTCCy9MIiI59thjkyRJWwWsqKhIhg4dmtx7773J3XffXWjEoaamps3GFfbYY4/kiSeeSG666aakoqIi2XPPPQsPs//xj3+cRKQtkf3pT39KbrzxxqSioiIZNWpUsnTp0qShoSEZP358UllZmVx//fXJY489Vmgl7brrrmt1/drbP0v5fEvZ/1rbjqUch/l9Z5999kn+/ve/F1oSjyitEZATTjghmTJlSqG1xOYtQOcf5N/0OGhL8+N/RefAfCM+q3KuKbX87e2DDzzwQBKRNij08MMPJ7feemsydOjQokZwmh9DzbW2n7Z3Hmm+ndo7/pOk9f2jlGvML37xi8IDyl9++eXk1ltvLRzb+dYcm59jmzcCkm+MY4899kgee+yx5MEHH2zxEPvWjoOXXnqp6PxcynGwokZA8vINF+UbSWhL/uH2f/3rX4uGt3eeSJKk0BrtNddckzz66KOFffLZZ59N6uvrC5/PVVddlTz99NOFa/lRRx1VWE5+HqeffnpyzTXXFFoSbt4ISPNr8UEHHZQMHTo0SZLi/Sv/IPmKiork5ptvTm666aaiV6nrtirXsSlTphTm+/jjjyfXX399UllZWWg4oCOuc0nSshGQUo7RAw88sOjB+M3lGwEZO3Zs4XvaCSeckOy+++6F7dm0MYamjYC0t/z2rmP5ZQ8dOjS5++67k7///e+F4yD/meXlH+AfUdz6einnyLauw801P9Y76xzdvHGBVTkPJcmaOYd3xGdYyjTNW2jNt7Kb/y7RWiMgKzo3JEn755jWNL/+rOp5spQy1tXVFVq//c1vfpNMmTKl0J9vaKOUz7CpUr8Pt3ctLqVsrWm+n5ZyfLS3j5VyHJZyvW++j+Wvlddcc00yZcqUQmvJTVv4HT16dBIRydVXX53cddddhe/aTafpiOOwvfM00H2tMwFg/kLT2it/4X3kkUeSiLT13qYqKiqSY445JkmSlgFgbW1toZWt/GvixInJ7Nmzi+bRtIWupsFga2pra5Nddtml8IMpf9E+55xziqY75JBDir40vPjii0XrOmLEiBYtRTX35ptvFpqZP/7445MXXnihcOHIL/+Pf/xj0Ze+efPmFVrmyr8OOOCAwg+R5uVflW1QSjl22WWXFl8W8i1U/vvf/y4s86yzzip8cc1fnJt/YW8qf0HOf8ZJkiRTp05NItLWv/ImTZpU2HYRkeyyyy7JHXfcURh/6623Fr7cRaQtcl1//fVFP57Hjh2b7LHHHoX3NN2X3n///VbX8dprr00iInn77beLhh977LFJRHGLja3JBy/Nf3jMnj270Bpt/rg45phjirbTCy+8UPjykZ8m3xLiY489VvRlZ8KECUllZWUyfvz4omOj6fyWLVtW9CMqvy/lW+ZOkiR5/vnnkz333LNomrPPPrvNlm3b2z9L+XxL2f/a2o6lHIe33npr4ViIiOTEE09MIhpbX20uH6AddthhRfvciSee2GL5//nPfwr7Y3uaH/8rOgc2bUF1Zc81pZa/lH3we9/7XtE8Ro8eX
fgBkSQtj6Hmmu+npZxHmm+nJGn/+G9t/yjlGjN//vyibVBRUZFcccUVyd57751UVFQk9fX1Lc6x+QDw7LPPLswz/+MvP5+xY8cmTz31VGF8a8dBvkXFW265JUmS0o6DpuewfAD4t7/9rWj98i2KNm11sTWTJ09OIiJ55JFHioaXcp6YNWtW4Qd8/vWTn/ykMP79998v/JjKb9dTTz216DxSU1OTHH/88cnQoUMLn8kee+xR+HHc1rW46Q/opvvXBRdcsMLvHqWu26pcx5Ikbe2y6T4wZsyYQsvQHXWdyx8r06dPLwxr7xgdOnRoUSjS3MyZM1vdXiNGjEiOPfbY5PXXXy+afvfddy8EgKUsf0XXsSRJkqeffrro2j1s2LDk+uuvb1HO119/PYlIg4Xm2jtHtnX9aK75sd5Z5+gFCxYkEZH8+Mc/TpJk1c5DSbJmzuFJ0jGfYXvT5I/ffIu5SZKGIs2P7+rq6pLODUnS/jmmNc2vP6t6niy1jFVVVUXn0jFjxiR77rlnUcjW3mfYXCnfh0u5FpdStuZa209LOT5WtI+VchyWcr1vvo/NnTu3xfqNGDGiKNx75plnCq3AR6QtFx9++OFF26AjjsP2ztNA97VOBIAdKV/jb8mSJUXDly9fnrz99tsthuctXbo0qaysLPxnqBRz5sxp98tha6qqqoq+jLenrq4umT17dtEPoTlz5iSzZs1a4fsWL16cvP76622uc/Pyr+o2aK8cpVi+fHny7rvvtvrfydX1wQcftFkLI0nSQKa9wLOpBQsWJAsWLFjpcuS/xNXV1a30e5uqra1N3n777RVuqzlz5iTvvvvuCpfV9L+5H3zwQdGX6Obq6uqSd999N1m8eHGb08ydOzd5++23S16/9vbPUqzO/lfKcfjvf/97pcvX0NCwwm2VD3HytXja09rxX6qVPdckSfvlT5L298H6+vrkjTfeSKqqqlodv6rH0Iq0tZ3aO/5X1eLFi5P33nuv3eNwRdeIhoaGZPr06cn8+fNXqyyrex7O/4h78803V6scpZwnFi1alLz55pttHleLFi1K3nnnnRbbdcaMGckNN9yQzJw5szCsvr4+GTVqVIsfpZ2hlHVbletYfh/48MMPV6t8K7uft3eMdrZSlt/edWzu3LltBlClWpVzZGvW1PfB5lb1PLSmzuEd8Rl2xOdcitU5x6zMdbqUc0kpampqVrhdVuUYX9nvw6tatta0tp+WcnyU8n23o1VVVSUzZsxIkiStNdo0AMybPn16u8fI6h6HAK3JJUmSRDdQX18fM2bMiKOPPjomT54cpa52dXV1vPrqq3HHHXfEFVdcEU8++WSMHz++k0u7drENOtdTTz0Vr7/+ehxxxBFxySWXFJ611dV23nnnqK+vLzwzjM41ffr0mD59epx33nmFB1WvTIMc0NFeeeWVmDlzZnz961+P7bffPv7yl790dZHa9M4778Tw4cNj//33j8suuyz69esX1113XZx33nlx4YUXxve+972uLiKwDnOOYVV84QtfiOnTp8err77a1UUBiIiIVWu+bB30yiuvxKc//emIiDjvvPNKft+MGTNip512ioiIk046qVsGX7ZB59prr72iuro6xo4du0qtDJMNf/nLXwoPsL7jjjuEf3S5iy66KG655ZaoqKiIX/ziF11dnBUaNmxYXHjhhXHRRRfFiBEjCsOPPvroteafKsC6yzkGgCzoNjUAq6qqYsqUKTF69OhCU/SlqKuri0ceeSQGDx4c22+/fcnNwWeJbdC5nnnmmaipqYmddtop+vTp09XFKfj3v/8dEY2t09G55s2bF//4xz9i1KhRhVaBoSu9/fbb8frrr8f48eNj8ODBXV2cktTX18cLL7wQCxcujDFjxsTAgQO7ukhAhjjHsDKmT58edXV1scUWW3R1UQAiohsFgAAAAADQHanKBQAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIM
AEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwrjzi/q8sAAAAAAHSSXJJE0tWFAAAAAAA6h1uAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAAAAAMkwACAAAAAAZJgAEAAAAgAwTAAIAAABAhgkAAQAAACDDBIAAAAAAkGECQAAAAADIMAEgAAAAAGSYABAAaKGuLuL99yM++CCioWHV3t+e+vqOmU8p0wAAQHcmAAQACmbMiDjiiIiePSOGDo3YbLOIAQMiTjstorp6xe9taIj45S8jhg9P39+/f8Q3vpGGiE09+mjEwQdHlJdHjBwZcemlxeOrqiJOPTViyJB0PptvHnH++RFLlqzcNM3ddVdELpd2V8bzz0fcccfKvae5//wnXfYxx6zefJqrrY246KLG9X788XQ5Dz3Usctpz8KF6XIvvHDl33vTTel7d9659fHjxkV87nPp3w88kE77xBOlz3/ZsvQ955xT+nsuuCB9z+LFbU/TfNvTtqeeSrfnvfd2dUkAoPsSAAIAEZGGf7vtFjFrVsTtt0fMnBnx7rsRP/95Gizts8+KQ8D//d+IE0+M2GSTiKuvjvja19JwZ5990hAmv4w99khDwZtuivjMZyLOPLM4BPzGNyKuvDJi990jfv3riO22izjvvIjTT1+5aZpLkuJuKZYti9hxx4iXXir9PSta9qrUplyRK66IOPvsxtqUFRUR48en4euatDrrd/31aXfKlIhXXmk5vr6+5XxX5jMsK0u3yaablv6eUvaV5tue9q3M5wYAdCwBIAAQM2em4d+OO0b8+c8RH
/94xO9/H3H33RE77RTxyCNpGLbPPmnNp9b89KdpAPXYYxHHHRdx7bVpLb2XX4547rl0mjPPTLv33BNx6KERN9wQsffeET/+ccTSpWngeM89EQcdFPGHP6Q15u69N2L06LR24fLlpU2ztskHcoMGdex8mwdjY8ZEPPlkGnitC955J91fLrgg7c+HgR2pvDzdJscd17Hz7egwFwCgMwkAAYC4446IwYMjbr454pZbIv7rvyIefjjiH/+I2GqrNKh58MGIN99s/fbLJUvSQO9nP0sDl7zRo9PunDlpYHLPPREHHBAxcGDjNF//elqz8J//TJ/nd+qpESedVDz/7bdPuwsXljZNKZ57Lr299JFHIr70pfQWxZEj0xAxSdLbP/O3pV57bcSuu6Z/52913m679D077BBx662N862tTed77bXpNJtvHvGXv0RUVkaMGpVOc9116TLvuy8NWHO59D2PPto4n+XL01pm48al43O5iH33jXj99XT81VdHXHVV+veuu6Zh6AsvpNM/9VTjfO64o3EeI0dGfP/7jbetLluWjrvttjRIHTIkfZ14YvHtr3PmpNt7m23S+fTvnwZq8+eXtq3bcsstaff//b+IL34x4je/WfFtt+15+ul0fW68MV2PHXZo/JyvvrpxuieeSG9D798/nebOO9Np/vzn4vndc086PpeL+OxnI/72t3R4a9s+Ih02cmTjtj777DTYbsu3vpXeuv7Vr6ZlOeOMdPjbb0d8+cvpsCFD0hqvs2Y1vq+2Nv2Mhgxp3Hd+97viebc3j2uuifjv/464/PJ0mr33Tudz3nnF8/noo+LtV1MTcfLJ6a3+/ftH7LVXut819cQT6bLzt3ZPmdL2NgAA1pAkicTLy8vLy8ure7/23z+SSy+NpK4ukohILr64cdwVV0Tym9+kfx9+eCRnn136fPfcM53fO+9EMmtW+veZZxZP87e/pcPvuqv1edTUpOOHDm17OaVMc+ed6TR33pn2//WvaX9EJKNGRXL66ZEMG5b2339/JEuXRnLOOWn/hAmRXHhh+r4zzmh8z4UXNq7jr39dXJaISCor0zLddVckTzyRboMkieS88xqn2XPPSE49tbF/zpx0mvPPT/v32SeSiy5KP6OISEaMiKShIZLJkyPZZZd02Pe+F8mTT0by6KNp/wMPpPO44Ya0f8yYSC67LJJDD22cZ5JEUlvbuNyKikhOOqlxnk0/p/ywo46K5Mc/jmT8+LT/hBPS8VVVaf+PflT6vlFfn26bMWPS/ltvTedxyy3F040ZE8nuu6d/339/Os3jj7c+zwcfbFyfESPSdfrPfxq3UZJE8uabjdOcdlokX/taY/9vf9vy8znkkEiOPrqxf/bs1rf9vfc2fp6XXhrJAQe0vr83feW3Y0Qko0en+8GMGWm5I9Ll/vCHjfv2okXF++BRR6XH6tixaf+996bjS5nHD35QvOwxYyL54hfT9y1Z0ljGa69Np3nyyfT8kF9Wfr/MHzMvvdS4fSsq0uGXXBLJgQc2LufPf+76c52Xl5eXl1d3fUVXF8DLy8vLy8ura1/Ll6c/zp9+OpLXXkv/fv311qe98cY0AChlvr/5TWMAkSSRvPVWy3AxSdLgICINGprPo6EhksMOS8fffHPryyllmiRpOwA84IDGafLrf/rpaf/SpWn/D36Q9r/3XtqfD6SSpDEUqahIA7V8AFhZmQZj+TI2LUs+YPrFLxqHXXddOuyee9LpR4xIg5m6usZpDjoonWbevLQ/HxLW1KT9TQPAurq0TCNGpOXKz+OYY9JpJk9uDAArKyOprk7HL1mSBkWjR6f977yTTnPqqcX7TEVFY3i3KgHgE08Ub4Pq6rR/l12Kp1uVAHDixMbtnv8M8wHgwQen/W+80XK7Ng8Am+6rl12WDnvssda3fT6Uywe4DQ3psk45pe1tkA8An3mm8T3HHZcOe/TRxuny4eKVVzZuk/znkySRzJyZbrcbbkj7S5lHPgC84orGZd9+e
3GQmCTpfIcNS8ffdls6/vzzG8d/9FE6bP/90/5DDkn7Z8xonCa/zQWAXl5eXl5eXfdyCzAAdHO5XPrsvrq69BWRNpyQt2RJ4/DlyyN69Wp/nr/7XXp74+jRjQ189OzZuLym8svKNxSSV1cXcfzxaWMhhx8eccghLZdTyjTtOeCAxr9HjEi7bTV28vzzaXfUqPQW6YcfTm/b/eQn0/e8807jtJ//fMQGG6R/N1/nvH33bfx7223Tbk1NOv20aentq0uWpM9RvO229FmNEW0/h7Gp115Ly/Ttb0est17j8P32S7vPPNM47Etfilh//fTv3r3TstTUpP1bbJE2dHHppRFz56a3F19/fUSfPo3TrIqbbkq7X/hCettvWVl6W+4TTzTe5ryq8uvY2nb/xz/Sz/mTn2w5fXP//d+Nf++2W9qdPr31afOf32c+E3HJJWmDJn/4Q3pb/IpUVESMHdtY3r/+Nf172bLGfSwvf2v39tun+8Ree6W3TdfXp7cnH354Or6UeeTl98FcLr0NOyJi0qTGdX3iibRl8Fwu3XYRaYMq+fm++GK6PR95JB333HMREyemLYjnHX30ircBAND5ytufBADIsh490hDm8cfTZ3tFRPzpT43PIzv66PQ5ZrffnoZde+654vn97GcRp52WNkjxwAMRAwakwzfcMO0uWFA8fb6/oqJx2LJl6TMFJ02KOPLItKXf5mFOKdOUYqONGv8uK0vL0VbLrv/+d9q9+uriZ8rlzZiRNqASEfGJT6zcsvv0Sbv5Zb/8csQJJxQ/c7HpNmpP/nlvTYOYiIhddmksa96QIcXTrL9+cSB7111pC8tNA86Ixs92ZS1alD4jMaI4iMu74YY0RFtVH/tY68MXL07X+9hji4d/5jOtT9+05eB+/dJuW41/HHJIGrpeeGHEWWelr2HD0nXJPz+yNVttVdz/xhtpd++9W0779ttp97LL0ucy3n13xOTJ6bA99kiD9803L20eeUOHNv7dt2+6ba65Jt2///jHdPjXvpZ233037bYV6FVVpcveffe2lwEAdA0BIAAQn/1s+qP/5JPT7rHHpi3rLlkS8eyzabDx4Ydp4HDXXW3P59JL05Z+99gjbVghXwMuIg0XKipa1qDKB1Gbb552ly5NG0W4++40hLzoouIaiaVOU6qVeV++8ZJrronYf//Wx+drS5ZSU7KtZS9dmgattbXpNv3MZ9LalBddlL5KkQ8Xmweu+dqDw4c3DuvRo+35vPRSxIEHprW8rrsubRTjU5+KmDAhrRG4Ku65J+0ee2zaUEpT554b8atfpY1jNK25uDLK2/iGmw9ZmzcU01ZNxpXZN8rKIn7yk7SRlccfT9fxV79Kaw7W1ra9Ls33k8rKdNizz7acNl+LduDA9Dj86KO0cZ477kiPhaOPbmxwpr155DXfVoccku7fkyentTQnTIjYcst03ODBafell9JlNLfBBunwqqri4atTUxQA6BhuAQYACjV8vvzliG9+M+LJJ9PbC48+Og3+NtwwvaV1990ba5A1d/PNafh3wAER999fHP7l7bVXGlzkW6GNSIPCiMZWfP/nf9Iw45JL0ldrIUwp03SEfI3C5cvTbr621iOPRGy8cePrssvS2lZz5nTMcqdOjZg9O23t+PTT0xCmb9+Iv/89HZ8PGfPly/c3tcUWaTe/ffPuvz/t5m9ZbU++BuJVV6W3dW+/fRqgPftsy9u2S5VvsfbCC9MQsOnryCPTW5fzIWFHyuXSz+mee4pbMM7f8rqy84po3Pbf+U7aSnKPHhH77JPWoDv11HTce++VPt8ddkhD8TlzGvevRYvSIPGaa9Ll7bprelvuRhultWDvuiu9LT1/i25781iRCRPSGnuXXZa27pu/rTiisRXrf/yjcb6DBqW3bh9zTDpul13Smr9NQ7/87cEAQNdRAxAAiIED0+d57b57GvQdfnhaw662Nn2O2dVXp4HS7be3rEEUkQZCxx+f/j1sWMTFFxeP/+//T
mt6nXxyelvhIYekNfceeyyd55VXpreTPvVUxG9/m9YUrK1Na4E1ddJJ6bPx2psmX1NvdeXX9Z57IkaOTLfLHnukZa6oSIPTZ5+N+OlP0/Bqs83SoGV1bb11Ov/bbktrZ663XnqLcz6My4cr+ef2XX55xFe+UjyPior0NtSLL06DqK9+NX223sknp7UJd9qptLJMmJB2r7kmDXvmzIn4wQ/SYc1rF+bNnJkGwRMnRpx9dvG46dPTWmsHHND653TooWmZr7km4qCDSivjyvjBD9KQavfd0yD59dfbf05fa5pv+z33bNwPDj00rZ03aVJaIy7/bMlSfO97aYB24IER3/1uGrL96EfprbVf+1paY2/nndOaoCNHpn+/9FIaGuefgdnePFakrCwN/s89N+0/8MDGcd/6VjqfM85IA+rPfCbixhvT/fLWW9NQNH+MH3xwWhvyzTfTfww0taL9AwDoJF3dComXl5eXl5fX2vOaOTOSCy6IZMKEtNXOiEj23jttKXTRorbfN3ly4/StvW66qXHaa65pHF5REclJJ6WtyiZJJD/84Yrn8+67pU3TWhn/9Kd0/J/+lPY/8khja7hNp6uoSFvKzfefeWbjvN9/P5LZsxtbNY3/vwXdY45pbJl30aKWLaU2f+VbkG26TV9+OR12yy1p/623xv/X3h2zxBGEYQCeVFYJwSJVwFQWFhaCYCMR/4GFlaWdYK/Cdbb+ABsLGwsFsUxjkTKVTUSQQFSIoCiIRVKom+JzOcU7NUZRPp8HBvZml7292eWKl535qp6e5vcMDZVqYSG25+fjmO3t+P5SSjUxERVqS4lquFUVVX6vXn+5rJB7cNDcX0qpGo3r1zc6GpWA68/T083vKZeVnaemYvv791KdnMT27Gwcv7MTn8fHb/72ubnYt7LSfnz6++OYHz9ie3g4+usqwF+/3v4crq83++oqwDMz14+rn/G+vrjuq2Pf6v5sbkbf4mLrsW81Tr29MT7tfufAwM2qx1VVquXl6+cZHLw+XsfHzWq7dRsZKdXR0f3P0WhE/9UK0XXb2op9o6M3921sxJjV5+3uvln9eWkpKgfXx0xONitc3/V8aJqmaZr2NO1NVZXqgdkhAJDY6Wm8bVSvm/aYzs6ioEZXV/v12l6Ses24d++afX/+lPLrVxT7eKopyKXEG3Pv37cvAHJ+XsrxcbxN124sz85K2d2NNxQ7Oh52HRcXMZX148f7rW/4Uq2txVgODzf7Vldj+vuXLzFN/b5ajf3FRRTa+PCh9TT4f7G/H/ers7P1/t+/475++tT+vt51joc6OYn/iNsKfOzuRoGZhz5zAMDjEQACAPBqjI+XsrAQod/nzzGlfGwspuzu7f1/aAcA8BIJAAEAeDV+/oz16a5WyH37Ntaj7Ot7rqsCAHhaAkAAAF6dw8NSvn2LojXd3VG9FwAgKwEgAAAAACT2hEtWAwAAAADPTQAIAAAAAIkJAAEAAAAgMQEgAAAAACQmAAQAAACAxASAAAAAAJCYABAAAAAAEhMAAgAAAEBiAkAAAAAASEwACAAAAACJCQABAAAAIDEBIAAAAAAkJgAEAAAAgMQEgAAAAACQmAAQAAAAABITAAIAAABAYgJAAAAAAEhMAAgAAAAAiQkAAQAAACAxASAAAAAAJCYABAAAAIDEBIAAAAAAkJgAEAAAAAASEwACAAAAQGICQAAAAABITAAIAAAAAIkJAAEAAAAgMQEgAAAAACQmAAQAAACAxASAAAAAAJCYABAAAAAAEhMAAgAAAEBiAkAAAAAASEwACAAAAACJCQABAAAAILG/WTMoiz93V3cAAAAASUVORK5CYII=\"\u003e", "metadata": "{}", "quality": [], "quality-suggestion": null, "quality-suggestion-metadata": { "agent": null, "score": null, "type": null } } ``` ### 
Data Fields Among the dataset fields, we differentiate between the following: * **Fields:** These are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions. * **image** is of type `text`. * **html_code** is of type `text`. * **Questions:** These are the questions that will be asked to the annotators. They can be of different types, such as `RatingQuestion`, `TextQuestion`, `LabelQuestion`, `MultiLabelQuestion`, and `RankingQuestion`. * **accuracy** is of type `rating` with the following allowed values [1, 2, 3, 4, 5, 6, 7]. * **quality** is of type `multi_label_selection` with the following allowed values ['clean code', 'efficient', 'proper tags and classes']. * **correction** is of type `text`. * **Suggestions:** As of Argilla 1.13.0, the suggestions have been included to provide the annotators with suggestions to ease or assist during the annotation process. Suggestions are linked to the existing questions, are always optional, and contain not just the suggestion itself, but also the metadata linked to it, if applicable. * (optional) **accuracy-suggestion** is of type `rating` with the following allowed values [1, 2, 3, 4, 5, 6, 7]. * (optional) **quality-suggestion** is of type `multi_label_selection` with the following allowed values ['clean code', 'efficient', 'proper tags and classes']. * (optional) **correction-suggestion** is of type `text`. Additionally, we also have two more fields that are optional and are the following: * **metadata:** This is an optional field that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. 
For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the `metadata_properties` defined in the dataset configuration file in `argilla.yaml`. * **external_id:** This is an optional field that can be used to provide an external ID for the dataset record. This can be useful if you want to link the dataset record to an external resource, such as a database or a file. ### Data Splits The dataset contains a single split, which is `train`. ## Dataset Creation To create this dataset we used the following snippet: ```python import argilla as rg from argilla.client.feedback.utils import image_to_html from datasets import load_dataset # Load the original dataset dataset = load_dataset("HuggingFaceM4/WebSight", split='train', streaming=True) # Shuffle the samples to avoid any bias shuffled_dataset = dataset.shuffle(seed=50, buffer_size=5_000) # Take a sample of 5000 subset = shuffled_dataset.take(5000) # Format the text to be rendered in markdown def add_json_formatting(example): example['text'] = '```json\n' + example['text'] + '\n```' return example updated_subset = subset.map(add_json_formatting) # Set a temporary path to save the image temp_img_path = "temp_img.png" # Iterate over the samples in the subset records = [] for entry in updated_subset: # Save the image to the temporary path entry["image"].save(temp_img_path, format="png") # Add the records to the FeedbackDataset record = rg.FeedbackRecord( fields={ "image": image_to_html(temp_img_path, file_type="png"), "html_code": entry["text"], }, suggestions = [ { "question_name": "correction", "value": entry["text"], }], ) ds.add_records(record, show_progress=True) ``` [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### 
Contributions [More Information Needed]
argilla/websight-5K-multimodal
[ "size_categories:1K<n<10K", "rlfh", "argilla", "human-feedback", "region:us" ]
2024-01-25T08:09:54+00:00
{"size_categories": "1K<n<10K", "tags": ["rlfh", "argilla", "human-feedback"]}
2024-01-25T08:41:32+00:00
[]
[]
TAGS #size_categories-1K<n<10K #rlfh #argilla #human-feedback #region-us
Dataset Card for websight-5K-multimodal ======================================= This dataset has been created with Argilla. It is a subset of 5000 records from the Websight collection, which is used for HTML/CSS code generation from an input image. Below you can see a screenshot of the UI from where annotators can work comfortably. !image/png As shown in the sections below, this dataset can be loaded into Argilla as explained in Load with Argilla, or used directly with the 'datasets' library in Load with 'datasets'. Dataset Description ------------------- * Homepage: URL * Repository: URL * Paper: * Leaderboard: * Point of Contact: ### Dataset Summary This dataset contains: * A dataset configuration file conforming to the Argilla dataset format named 'URL'. This configuration file will be used to configure the dataset when using the 'FeedbackDataset.from\_huggingface' method in Argilla. * Dataset records in a format compatible with HuggingFace 'datasets'. These records will be loaded automatically when using 'FeedbackDataset.from\_huggingface' and can be loaded independently using the 'datasets' library via 'load\_dataset'. * The annotation guidelines that have been used for building and curating the dataset, if they've been defined in Argilla. ### Load with Argilla To load with Argilla, you'll just need to install Argilla as 'pip install argilla --upgrade' and then use the following code: ### Load with 'datasets' To load this dataset with 'datasets', you'll just need to install 'datasets' as 'pip install datasets --upgrade' and then use the following code: ### Supported Tasks and Leaderboards This dataset can contain multiple fields, questions and responses so it can be used for different NLP tasks, depending on the configuration. The dataset structure is described in the Dataset Structure section. There are no leaderboards associated with this dataset. 
### Languages Dataset Structure ----------------- ### Data in Argilla The dataset is created in Argilla with: fields, questions, suggestions, metadata, vectors, and guidelines. The fields are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions. The questions are the questions that will be asked to the annotators. They can be of different types, such as rating, text, label\_selection, multi\_label\_selection, or ranking. The suggestions are human or machine generated recommendations for each question to assist the annotator during the annotation process, so those are always linked to the existing questions, and named appending "-suggestion" and "-suggestion-metadata" to those, containing the value/s of the suggestion and its metadata, respectively. So on, the possible values are the same as in the table above, but the column name is appended with "-suggestion" and the metadata is appended with "-suggestion-metadata". The metadata is a dictionary that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the 'metadata\_properties' defined in the dataset configuration file in 'URL'. The guidelines, are optional as well, and are just a plain string that can be used to provide instructions to the annotators. Find those in the annotation guidelines section. 
### Data Instances An example of a dataset instance in Argilla looks as follows: json\n\u003chtml\u003e\n\u003cstyle\u003e\nbody {\n font-family: Arial, sans-serif;\n margin: 0;\n padding: 0;\n border-box;\n}\n\nheader {\n background: #ff0;\n text-align: center;\n padding: 20px;\n}\n\n#video-container {\n width: 70%;\n margin: auto;\n text-align: center;\n}\n\n#video-player {\n width: 100%;\n height: 300px;\n background: #f0f;\n}\n\n#chef-tips {\n margin: 40px;\n}\n\n#chef-tips p {\n color: #000;\n line-height: 1.6;\n}\n\nfooter {\n background: #ff0;\n text-align: center;\n padding: 20px;\n position: fixed;\n width: 100%;\n bottom: 0;\n}\n\u003c/style\u003e\n\u003cbody\u003e\n \u003cheader\u003e\n \u003ch1\u003eInter\u0027national\u003c/h1\u003e\n \u003c/header\u003e\n \u003cmain\u003e\n \u003csection id="video-container"\u003e\n \u003cvideo id="video-player" src="#" controls\u003e\u003c/video\u003e\n \u003c/section\u003e\n \u003csection id="chef-tips"\u003e\n \u003ch2\u003eChef\u0027s Tips\u003c/h2\u003e\n \u003cp\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\u003c/p\u003e\n \u003c/section\u003e\n \u003c/main\u003e\n \u003cfooter\u003e\n \u003cp\u003e\u00a9 2022 Inter\u0027national. 
All rights reserved.\u003c/p\u003e\n \u003c/footer\u003e\n \u003c/body\u003e\n\u003c/html\u003e\njson\n\u003chtml\u003e\n\u003cstyle\u003e\nbody {\n font-family: Arial, sans-serif;\n margin: 0;\n padding: 0;\n border-box;\n}\n\nheader {\n background: #ff0;\n text-align: center;\n padding: 20px;\n}\n\n#video-container {\n width: 70%;\n margin: auto;\n text-align: center;\n}\n\n#video-player {\n width: 100%;\n height: 300px;\n background: #f0f;\n}\n\n#chef-tips {\n margin: 40px;\n}\n\n#chef-tips p {\n color: #000;\n line-height: 1.6;\n}\n\nfooter {\n background: #ff0;\n text-align: center;\n padding: 20px;\n position: fixed;\n width: 100%;\n bottom: 0;\n}\n\u003c/style\u003e\n\u003cbody\u003e\n \u003cheader\u003e\n \u003ch1\u003eInter\u0027national\u003c/h1\u003e\n \u003c/header\u003e\n \u003cmain\u003e\n \u003csection id="video-container"\u003e\n \u003cvideo id="video-player" src="#" controls\u003e\u003c/video\u003e\n \u003c/section\u003e\n \u003csection id="chef-tips"\u003e\n \u003ch2\u003eChef\u0027s Tips\u003c/h2\u003e\n \u003cp\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\u003c/p\u003e\n \u003c/section\u003e\n \u003c/main\u003e\n \u003cfooter\u003e\n \u003cp\u003e\u00a9 2022 Inter\u0027national. 
All rights reserved.\u003c/p\u003e\n \u003c/footer\u003e\n \u003c/body\u003e\n\u003c/html\u003e\n While the same record in HuggingFace 'datasets' looks as follows: json\n\u003chtml\u003e\n\u003cstyle\u003e\nbody {\n font-family: Arial, sans-serif;\n margin: 0;\n padding: 0;\n border-box;\n}\n\nheader {\n background: #ff0;\n text-align: center;\n padding: 20px;\n}\n\n#video-container {\n width: 70%;\n margin: auto;\n text-align: center;\n}\n\n#video-player {\n width: 100%;\n height: 300px;\n background: #f0f;\n}\n\n#chef-tips {\n margin: 40px;\n}\n\n#chef-tips p {\n color: #000;\n line-height: 1.6;\n}\n\nfooter {\n background: #ff0;\n text-align: center;\n padding: 20px;\n position: fixed;\n width: 100%;\n bottom: 0;\n}\n\u003c/style\u003e\n\u003cbody\u003e\n \u003cheader\u003e\n \u003ch1\u003eInter\u0027national\u003c/h1\u003e\n \u003c/header\u003e\n \u003cmain\u003e\n \u003csection id="video-container"\u003e\n \u003cvideo id="video-player" src="#" controls\u003e\u003c/video\u003e\n \u003c/section\u003e\n \u003csection id="chef-tips"\u003e\n \u003ch2\u003eChef\u0027s Tips\u003c/h2\u003e\n \u003cp\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\u003c/p\u003e\n \u003c/section\u003e\n \u003c/main\u003e\n \u003cfooter\u003e\n \u003cp\u003e\u00a9 2022 Inter\u0027national. 
All rights reserved.\u003c/p\u003e\n \u003c/footer\u003e\n \u003c/body\u003e\n\u003c/html\u003e\njson\n\u003chtml\u003e\n\u003cstyle\u003e\nbody {\n font-family: Arial, sans-serif;\n margin: 0;\n padding: 0;\n border-box;\n}\n\nheader {\n background: #ff0;\n text-align: center;\n padding: 20px;\n}\n\n#video-container {\n width: 70%;\n margin: auto;\n text-align: center;\n}\n\n#video-player {\n width: 100%;\n height: 300px;\n background: #f0f;\n}\n\n#chef-tips {\n margin: 40px;\n}\n\n#chef-tips p {\n color: #000;\n line-height: 1.6;\n}\n\nfooter {\n background: #ff0;\n text-align: center;\n padding: 20px;\n position: fixed;\n width: 100%;\n bottom: 0;\n}\n\u003c/style\u003e\n\u003cbody\u003e\n \u003cheader\u003e\n \u003ch1\u003eInter\u0027national\u003c/h1\u003e\n \u003c/header\u003e\n \u003cmain\u003e\n \u003csection id="video-container"\u003e\n \u003cvideo id="video-player" src="#" controls\u003e\u003c/video\u003e\n \u003c/section\u003e\n \u003csection id="chef-tips"\u003e\n \u003ch2\u003eChef\u0027s Tips\u003c/h2\u003e\n \u003cp\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\u003c/p\u003e\n \u003c/section\u003e\n \u003c/main\u003e\n \u003cfooter\u003e\n \u003cp\u003e\u00a9 2022 Inter\u0027national. All rights reserved.\u003c/p\u003e\n \u003c/footer\u003e\n \u003c/body\u003e\n\u003c/html\u003e\n ### Data Fields Among the dataset fields, we differentiate between the following: * Fields: These are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions. + image is of type 'text'. + html\_code is of type 'text'. * Questions: These are the questions that will be asked to the annotators. They can be of different types, such as 'RatingQuestion', 'TextQuestion', 'LabelQuestion', 'MultiLabelQuestion', and 'RankingQuestion'. 
+ accuracy is of type 'rating' with the following allowed values [1, 2, 3, 4, 5, 6, 7]. + quality is of type 'multi\_label\_selection' with the following allowed values ['clean code', 'efficient', 'proper tags and classes']. + correction is of type 'text'. * Suggestions: As of Argilla 1.13.0, the suggestions have been included to provide the annotators with suggestions to ease or assist during the annotation process. Suggestions are linked to the existing questions, are always optional, and contain not just the suggestion itself, but also the metadata linked to it, if applicable. + (optional) accuracy-suggestion is of type 'rating' with the following allowed values [1, 2, 3, 4, 5, 6, 7]. + (optional) quality-suggestion is of type 'multi\_label\_selection' with the following allowed values ['clean code', 'efficient', 'proper tags and classes']. + (optional) correction-suggestion is of type 'text'. Additionally, we also have two more fields that are optional and are the following: * metadata: This is an optional field that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the 'metadata\_properties' defined in the dataset configuration file in 'URL'. * external\_id: This is an optional field that can be used to provide an external ID for the dataset record. This can be useful if you want to link the dataset record to an external resource, such as a database or a file. ### Data Splits The dataset contains a single split, which is 'train'. 
Dataset Creation ---------------- To create this dataset we used the following snippet: json\n' + example['text'] + '\n ### Licensing Information ### Contributions
[ "### Dataset Summary\n\n\nThis dataset contains:\n\n\n* A dataset configuration file conforming to the Argilla dataset format named 'URL'. This configuration file will be used to configure the dataset when using the 'FeedbackDataset.from\\_huggingface' method in Argilla.\n* Dataset records in a format compatible with HuggingFace 'datasets'. These records will be loaded automatically when using 'FeedbackDataset.from\\_huggingface' and can be loaded independently using the 'datasets' library via 'load\\_dataset'.\n* The annotation guidelines that have been used for building and curating the dataset, if they've been defined in Argilla.", "### Load with Argilla\n\n\nTo load with Argilla, you'll just need to install Argilla as 'pip install argilla --upgrade' and then use the following code:", "### Load with 'datasets'\n\n\nTo load this dataset with 'datasets', you'll just need to install 'datasets' as 'pip install datasets --upgrade' and then use the following code:", "### Supported Tasks and Leaderboards\n\n\nThis dataset can contain multiple fields, questions and responses so it can be used for different NLP tasks, depending on the configuration. The dataset structure is described in the Dataset Structure section.\n\n\nThere are no leaderboards associated with this dataset.", "### Languages\n\n\nDataset Structure\n-----------------", "### Data in Argilla\n\n\nThe dataset is created in Argilla with: fields, questions, suggestions, metadata, vectors, and guidelines.\n\n\nThe fields are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions.\n\n\n\nThe questions are the questions that will be asked to the annotators. 
They can be of different types, such as rating, text, label\\_selection, multi\\_label\\_selection, or ranking.\n\n\n\nThe suggestions are human or machine generated recommendations for each question to assist the annotator during the annotation process, so those are always linked to the existing questions, and named appending \"-suggestion\" and \"-suggestion-metadata\" to those, containing the value/s of the suggestion and its metadata, respectively. So on, the possible values are the same as in the table above, but the column name is appended with \"-suggestion\" and the metadata is appended with \"-suggestion-metadata\".\n\n\nThe metadata is a dictionary that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the 'metadata\\_properties' defined in the dataset configuration file in 'URL'.\n\n\n\nThe guidelines, are optional as well, and are just a plain string that can be used to provide instructions to the annotators. 
Find those in the annotation guidelines section.", "### Data Instances\n\n\nAn example of a dataset instance in Argilla looks as follows:\n\n\njson\\n\\u003chtml\\u003e\\n\\u003cstyle\\u003e\\nbody {\\n font-family: Arial, sans-serif;\\n margin: 0;\\n padding: 0;\\n border-box;\\n}\\n\\nheader {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n}\\n\\n#video-container {\\n width: 70%;\\n margin: auto;\\n text-align: center;\\n}\\n\\n#video-player {\\n width: 100%;\\n height: 300px;\\n background: #f0f;\\n}\\n\\n#chef-tips {\\n margin: 40px;\\n}\\n\\n#chef-tips p {\\n color: #000;\\n line-height: 1.6;\\n}\\n\\nfooter {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n position: fixed;\\n width: 100%;\\n bottom: 0;\\n}\\n\\u003c/style\\u003e\\n\\u003cbody\\u003e\\n \\u003cheader\\u003e\\n \\u003ch1\\u003eInter\\u0027national\\u003c/h1\\u003e\\n \\u003c/header\\u003e\\n \\u003cmain\\u003e\\n \\u003csection id=\"video-container\"\\u003e\\n \\u003cvideo id=\"video-player\" src=\"#\" controls\\u003e\\u003c/video\\u003e\\n \\u003c/section\\u003e\\n \\u003csection id=\"chef-tips\"\\u003e\\n \\u003ch2\\u003eChef\\u0027s Tips\\u003c/h2\\u003e\\n \\u003cp\\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\\u003c/p\\u003e\\n \\u003c/section\\u003e\\n \\u003c/main\\u003e\\n \\u003cfooter\\u003e\\n \\u003cp\\u003e\\u00a9 2022 Inter\\u0027national. 
All rights reserved.\\u003c/p\\u003e\\n \\u003c/footer\\u003e\\n \\u003c/body\\u003e\\n\\u003c/html\\u003e\\njson\\n\\u003chtml\\u003e\\n\\u003cstyle\\u003e\\nbody {\\n font-family: Arial, sans-serif;\\n margin: 0;\\n padding: 0;\\n border-box;\\n}\\n\\nheader {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n}\\n\\n#video-container {\\n width: 70%;\\n margin: auto;\\n text-align: center;\\n}\\n\\n#video-player {\\n width: 100%;\\n height: 300px;\\n background: #f0f;\\n}\\n\\n#chef-tips {\\n margin: 40px;\\n}\\n\\n#chef-tips p {\\n color: #000;\\n line-height: 1.6;\\n}\\n\\nfooter {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n position: fixed;\\n width: 100%;\\n bottom: 0;\\n}\\n\\u003c/style\\u003e\\n\\u003cbody\\u003e\\n \\u003cheader\\u003e\\n \\u003ch1\\u003eInter\\u0027national\\u003c/h1\\u003e\\n \\u003c/header\\u003e\\n \\u003cmain\\u003e\\n \\u003csection id=\"video-container\"\\u003e\\n \\u003cvideo id=\"video-player\" src=\"#\" controls\\u003e\\u003c/video\\u003e\\n \\u003c/section\\u003e\\n \\u003csection id=\"chef-tips\"\\u003e\\n \\u003ch2\\u003eChef\\u0027s Tips\\u003c/h2\\u003e\\n \\u003cp\\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\\u003c/p\\u003e\\n \\u003c/section\\u003e\\n \\u003c/main\\u003e\\n \\u003cfooter\\u003e\\n \\u003cp\\u003e\\u00a9 2022 Inter\\u0027national. 
All rights reserved.\\u003c/p\\u003e\\n \\u003c/footer\\u003e\\n \\u003c/body\\u003e\\n\\u003c/html\\u003e\\n\n\n\nWhile the same record in HuggingFace 'datasets' looks as follows:\n\n\njson\\n\\u003chtml\\u003e\\n\\u003cstyle\\u003e\\nbody {\\n font-family: Arial, sans-serif;\\n margin: 0;\\n padding: 0;\\n border-box;\\n}\\n\\nheader {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n}\\n\\n#video-container {\\n width: 70%;\\n margin: auto;\\n text-align: center;\\n}\\n\\n#video-player {\\n width: 100%;\\n height: 300px;\\n background: #f0f;\\n}\\n\\n#chef-tips {\\n margin: 40px;\\n}\\n\\n#chef-tips p {\\n color: #000;\\n line-height: 1.6;\\n}\\n\\nfooter {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n position: fixed;\\n width: 100%;\\n bottom: 0;\\n}\\n\\u003c/style\\u003e\\n\\u003cbody\\u003e\\n \\u003cheader\\u003e\\n \\u003ch1\\u003eInter\\u0027national\\u003c/h1\\u003e\\n \\u003c/header\\u003e\\n \\u003cmain\\u003e\\n \\u003csection id=\"video-container\"\\u003e\\n \\u003cvideo id=\"video-player\" src=\"#\" controls\\u003e\\u003c/video\\u003e\\n \\u003c/section\\u003e\\n \\u003csection id=\"chef-tips\"\\u003e\\n \\u003ch2\\u003eChef\\u0027s Tips\\u003c/h2\\u003e\\n \\u003cp\\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\\u003c/p\\u003e\\n \\u003c/section\\u003e\\n \\u003c/main\\u003e\\n \\u003cfooter\\u003e\\n \\u003cp\\u003e\\u00a9 2022 Inter\\u0027national. 
All rights reserved.\\u003c/p\\u003e\\n \\u003c/footer\\u003e\\n \\u003c/body\\u003e\\n\\u003c/html\\u003e\\njson\\n\\u003chtml\\u003e\\n\\u003cstyle\\u003e\\nbody {\\n font-family: Arial, sans-serif;\\n margin: 0;\\n padding: 0;\\n border-box;\\n}\\n\\nheader {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n}\\n\\n#video-container {\\n width: 70%;\\n margin: auto;\\n text-align: center;\\n}\\n\\n#video-player {\\n width: 100%;\\n height: 300px;\\n background: #f0f;\\n}\\n\\n#chef-tips {\\n margin: 40px;\\n}\\n\\n#chef-tips p {\\n color: #000;\\n line-height: 1.6;\\n}\\n\\nfooter {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n position: fixed;\\n width: 100%;\\n bottom: 0;\\n}\\n\\u003c/style\\u003e\\n\\u003cbody\\u003e\\n \\u003cheader\\u003e\\n \\u003ch1\\u003eInter\\u0027national\\u003c/h1\\u003e\\n \\u003c/header\\u003e\\n \\u003cmain\\u003e\\n \\u003csection id=\"video-container\"\\u003e\\n \\u003cvideo id=\"video-player\" src=\"#\" controls\\u003e\\u003c/video\\u003e\\n \\u003c/section\\u003e\\n \\u003csection id=\"chef-tips\"\\u003e\\n \\u003ch2\\u003eChef\\u0027s Tips\\u003c/h2\\u003e\\n \\u003cp\\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\\u003c/p\\u003e\\n \\u003c/section\\u003e\\n \\u003c/main\\u003e\\n \\u003cfooter\\u003e\\n \\u003cp\\u003e\\u00a9 2022 Inter\\u0027national. All rights reserved.\\u003c/p\\u003e\\n \\u003c/footer\\u003e\\n \\u003c/body\\u003e\\n\\u003c/html\\u003e\\n", "### Data Fields\n\n\nAmong the dataset fields, we differentiate between the following:\n\n\n* Fields: These are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions.\n\n\n\t+ image is of type 'text'.\n\t+ html\\_code is of type 'text'.\n* Questions: These are the questions that will be asked to the annotators. 
They can be of different types, such as 'RatingQuestion', 'TextQuestion', 'LabelQuestion', 'MultiLabelQuestion', and 'RankingQuestion'.\n\n\n\t+ accuracy is of type 'rating' with the following allowed values [1, 2, 3, 4, 5, 6, 7].\n\t+ quality is of type 'multi\\_label\\_selection' with the following allowed values ['clean code', 'efficient', 'proper tags and classes'].\n\t+ correction is of type 'text'.\n* Suggestions: As of Argilla 1.13.0, the suggestions have been included to provide the annotators with suggestions to ease or assist during the annotation process. Suggestions are linked to the existing questions, are always optional, and contain not just the suggestion itself, but also the metadata linked to it, if applicable.\n\n\n\t+ (optional) accuracy-suggestion is of type 'rating' with the following allowed values [1, 2, 3, 4, 5, 6, 7].\n\t+ (optional) quality-suggestion is of type 'multi\\_label\\_selection' with the following allowed values ['clean code', 'efficient', 'proper tags and classes'].\n\t+ (optional) correction-suggestion is of type 'text'.\n\n\nAdditionally, we also have two more fields that are optional and are the following:\n\n\n* metadata: This is an optional field that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the 'metadata\\_properties' defined in the dataset configuration file in 'URL'.\n* external\\_id: This is an optional field that can be used to provide an external ID for the dataset record. 
This can be useful if you want to link the dataset record to an external resource, such as a database or a file.", "### Data Splits\n\n\nThe dataset contains a single split, which is 'train'.\n\n\nDataset Creation\n----------------\n\n\nTo create this dataset we used the following snippet:\n\n\njson\\n' + example['text'] + '\\n", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#size_categories-1K<n<10K #rlfh #argilla #human-feedback #region-us \n", "### Dataset Summary\n\n\nThis dataset contains:\n\n\n* A dataset configuration file conforming to the Argilla dataset format named 'URL'. This configuration file will be used to configure the dataset when using the 'FeedbackDataset.from\\_huggingface' method in Argilla.\n* Dataset records in a format compatible with HuggingFace 'datasets'. These records will be loaded automatically when using 'FeedbackDataset.from\\_huggingface' and can be loaded independently using the 'datasets' library via 'load\\_dataset'.\n* The annotation guidelines that have been used for building and curating the dataset, if they've been defined in Argilla.", "### Load with Argilla\n\n\nTo load with Argilla, you'll just need to install Argilla as 'pip install argilla --upgrade' and then use the following code:", "### Load with 'datasets'\n\n\nTo load this dataset with 'datasets', you'll just need to install 'datasets' as 'pip install datasets --upgrade' and then use the following code:", "### Supported Tasks and Leaderboards\n\n\nThis dataset can contain multiple fields, questions and responses so it can be used for different NLP tasks, depending on the configuration. The dataset structure is described in the Dataset Structure section.\n\n\nThere are no leaderboards associated with this dataset.", "### Languages\n\n\nDataset Structure\n-----------------", "### Data in Argilla\n\n\nThe dataset is created in Argilla with: fields, questions, suggestions, metadata, vectors, and guidelines.\n\n\nThe fields are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions.\n\n\n\nThe questions are the questions that will be asked to the annotators. 
They can be of different types, such as rating, text, label\\_selection, multi\\_label\\_selection, or ranking.\n\n\n\nThe suggestions are human or machine generated recommendations for each question to assist the annotator during the annotation process, so those are always linked to the existing questions, and named appending \"-suggestion\" and \"-suggestion-metadata\" to those, containing the value/s of the suggestion and its metadata, respectively. So on, the possible values are the same as in the table above, but the column name is appended with \"-suggestion\" and the metadata is appended with \"-suggestion-metadata\".\n\n\nThe metadata is a dictionary that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the 'metadata\\_properties' defined in the dataset configuration file in 'URL'.\n\n\n\nThe guidelines, are optional as well, and are just a plain string that can be used to provide instructions to the annotators. 
Find those in the annotation guidelines section.", "### Data Instances\n\n\nAn example of a dataset instance in Argilla looks as follows:\n\n\njson\\n\\u003chtml\\u003e\\n\\u003cstyle\\u003e\\nbody {\\n font-family: Arial, sans-serif;\\n margin: 0;\\n padding: 0;\\n border-box;\\n}\\n\\nheader {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n}\\n\\n#video-container {\\n width: 70%;\\n margin: auto;\\n text-align: center;\\n}\\n\\n#video-player {\\n width: 100%;\\n height: 300px;\\n background: #f0f;\\n}\\n\\n#chef-tips {\\n margin: 40px;\\n}\\n\\n#chef-tips p {\\n color: #000;\\n line-height: 1.6;\\n}\\n\\nfooter {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n position: fixed;\\n width: 100%;\\n bottom: 0;\\n}\\n\\u003c/style\\u003e\\n\\u003cbody\\u003e\\n \\u003cheader\\u003e\\n \\u003ch1\\u003eInter\\u0027national\\u003c/h1\\u003e\\n \\u003c/header\\u003e\\n \\u003cmain\\u003e\\n \\u003csection id=\"video-container\"\\u003e\\n \\u003cvideo id=\"video-player\" src=\"#\" controls\\u003e\\u003c/video\\u003e\\n \\u003c/section\\u003e\\n \\u003csection id=\"chef-tips\"\\u003e\\n \\u003ch2\\u003eChef\\u0027s Tips\\u003c/h2\\u003e\\n \\u003cp\\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\\u003c/p\\u003e\\n \\u003c/section\\u003e\\n \\u003c/main\\u003e\\n \\u003cfooter\\u003e\\n \\u003cp\\u003e\\u00a9 2022 Inter\\u0027national. 
All rights reserved.\\u003c/p\\u003e\\n \\u003c/footer\\u003e\\n \\u003c/body\\u003e\\n\\u003c/html\\u003e\\njson\\n\\u003chtml\\u003e\\n\\u003cstyle\\u003e\\nbody {\\n font-family: Arial, sans-serif;\\n margin: 0;\\n padding: 0;\\n border-box;\\n}\\n\\nheader {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n}\\n\\n#video-container {\\n width: 70%;\\n margin: auto;\\n text-align: center;\\n}\\n\\n#video-player {\\n width: 100%;\\n height: 300px;\\n background: #f0f;\\n}\\n\\n#chef-tips {\\n margin: 40px;\\n}\\n\\n#chef-tips p {\\n color: #000;\\n line-height: 1.6;\\n}\\n\\nfooter {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n position: fixed;\\n width: 100%;\\n bottom: 0;\\n}\\n\\u003c/style\\u003e\\n\\u003cbody\\u003e\\n \\u003cheader\\u003e\\n \\u003ch1\\u003eInter\\u0027national\\u003c/h1\\u003e\\n \\u003c/header\\u003e\\n \\u003cmain\\u003e\\n \\u003csection id=\"video-container\"\\u003e\\n \\u003cvideo id=\"video-player\" src=\"#\" controls\\u003e\\u003c/video\\u003e\\n \\u003c/section\\u003e\\n \\u003csection id=\"chef-tips\"\\u003e\\n \\u003ch2\\u003eChef\\u0027s Tips\\u003c/h2\\u003e\\n \\u003cp\\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\\u003c/p\\u003e\\n \\u003c/section\\u003e\\n \\u003c/main\\u003e\\n \\u003cfooter\\u003e\\n \\u003cp\\u003e\\u00a9 2022 Inter\\u0027national. 
All rights reserved.\\u003c/p\\u003e\\n \\u003c/footer\\u003e\\n \\u003c/body\\u003e\\n\\u003c/html\\u003e\\n\n\n\nWhile the same record in HuggingFace 'datasets' looks as follows:\n\n\njson\\n\\u003chtml\\u003e\\n\\u003cstyle\\u003e\\nbody {\\n font-family: Arial, sans-serif;\\n margin: 0;\\n padding: 0;\\n border-box;\\n}\\n\\nheader {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n}\\n\\n#video-container {\\n width: 70%;\\n margin: auto;\\n text-align: center;\\n}\\n\\n#video-player {\\n width: 100%;\\n height: 300px;\\n background: #f0f;\\n}\\n\\n#chef-tips {\\n margin: 40px;\\n}\\n\\n#chef-tips p {\\n color: #000;\\n line-height: 1.6;\\n}\\n\\nfooter {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n position: fixed;\\n width: 100%;\\n bottom: 0;\\n}\\n\\u003c/style\\u003e\\n\\u003cbody\\u003e\\n \\u003cheader\\u003e\\n \\u003ch1\\u003eInter\\u0027national\\u003c/h1\\u003e\\n \\u003c/header\\u003e\\n \\u003cmain\\u003e\\n \\u003csection id=\"video-container\"\\u003e\\n \\u003cvideo id=\"video-player\" src=\"#\" controls\\u003e\\u003c/video\\u003e\\n \\u003c/section\\u003e\\n \\u003csection id=\"chef-tips\"\\u003e\\n \\u003ch2\\u003eChef\\u0027s Tips\\u003c/h2\\u003e\\n \\u003cp\\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\\u003c/p\\u003e\\n \\u003c/section\\u003e\\n \\u003c/main\\u003e\\n \\u003cfooter\\u003e\\n \\u003cp\\u003e\\u00a9 2022 Inter\\u0027national. 
All rights reserved.\\u003c/p\\u003e\\n \\u003c/footer\\u003e\\n \\u003c/body\\u003e\\n\\u003c/html\\u003e\\njson\\n\\u003chtml\\u003e\\n\\u003cstyle\\u003e\\nbody {\\n font-family: Arial, sans-serif;\\n margin: 0;\\n padding: 0;\\n border-box;\\n}\\n\\nheader {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n}\\n\\n#video-container {\\n width: 70%;\\n margin: auto;\\n text-align: center;\\n}\\n\\n#video-player {\\n width: 100%;\\n height: 300px;\\n background: #f0f;\\n}\\n\\n#chef-tips {\\n margin: 40px;\\n}\\n\\n#chef-tips p {\\n color: #000;\\n line-height: 1.6;\\n}\\n\\nfooter {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n position: fixed;\\n width: 100%;\\n bottom: 0;\\n}\\n\\u003c/style\\u003e\\n\\u003cbody\\u003e\\n \\u003cheader\\u003e\\n \\u003ch1\\u003eInter\\u0027national\\u003c/h1\\u003e\\n \\u003c/header\\u003e\\n \\u003cmain\\u003e\\n \\u003csection id=\"video-container\"\\u003e\\n \\u003cvideo id=\"video-player\" src=\"#\" controls\\u003e\\u003c/video\\u003e\\n \\u003c/section\\u003e\\n \\u003csection id=\"chef-tips\"\\u003e\\n \\u003ch2\\u003eChef\\u0027s Tips\\u003c/h2\\u003e\\n \\u003cp\\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\\u003c/p\\u003e\\n \\u003c/section\\u003e\\n \\u003c/main\\u003e\\n \\u003cfooter\\u003e\\n \\u003cp\\u003e\\u00a9 2022 Inter\\u0027national. All rights reserved.\\u003c/p\\u003e\\n \\u003c/footer\\u003e\\n \\u003c/body\\u003e\\n\\u003c/html\\u003e\\n", "### Data Fields\n\n\nAmong the dataset fields, we differentiate between the following:\n\n\n* Fields: These are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions.\n\n\n\t+ image is of type 'text'.\n\t+ html\\_code is of type 'text'.\n* Questions: These are the questions that will be asked to the annotators. 
They can be of different types, such as 'RatingQuestion', 'TextQuestion', 'LabelQuestion', 'MultiLabelQuestion', and 'RankingQuestion'.\n\n\n\t+ accuracy is of type 'rating' with the following allowed values [1, 2, 3, 4, 5, 6, 7].\n\t+ quality is of type 'multi\\_label\\_selection' with the following allowed values ['clean code', 'efficient', 'proper tags and classes'].\n\t+ correction is of type 'text'.\n* Suggestions: As of Argilla 1.13.0, the suggestions have been included to provide the annotators with suggestions to ease or assist during the annotation process. Suggestions are linked to the existing questions, are always optional, and contain not just the suggestion itself, but also the metadata linked to it, if applicable.\n\n\n\t+ (optional) accuracy-suggestion is of type 'rating' with the following allowed values [1, 2, 3, 4, 5, 6, 7].\n\t+ (optional) quality-suggestion is of type 'multi\\_label\\_selection' with the following allowed values ['clean code', 'efficient', 'proper tags and classes'].\n\t+ (optional) correction-suggestion is of type 'text'.\n\n\nAdditionally, we also have two more fields that are optional and are the following:\n\n\n* metadata: This is an optional field that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the 'metadata\\_properties' defined in the dataset configuration file in 'URL'.\n* external\\_id: This is an optional field that can be used to provide an external ID for the dataset record. 
This can be useful if you want to link the dataset record to an external resource, such as a database or a file.", "### Data Splits\n\n\nThe dataset contains a single split, which is 'train'.\n\n\nDataset Creation\n----------------\n\n\nTo create this dataset we used the following snippet:\n\n\njson\\n' + example['text'] + '\\n", "### Licensing Information", "### Contributions" ]
[ 29, 162, 40, 53, 68, 11, 404, 2616, 611, 56, 6, 5 ]
[ "passage: TAGS\n#size_categories-1K<n<10K #rlfh #argilla #human-feedback #region-us \n### Dataset Summary\n\n\nThis dataset contains:\n\n\n* A dataset configuration file conforming to the Argilla dataset format named 'URL'. This configuration file will be used to configure the dataset when using the 'FeedbackDataset.from\\_huggingface' method in Argilla.\n* Dataset records in a format compatible with HuggingFace 'datasets'. These records will be loaded automatically when using 'FeedbackDataset.from\\_huggingface' and can be loaded independently using the 'datasets' library via 'load\\_dataset'.\n* The annotation guidelines that have been used for building and curating the dataset, if they've been defined in Argilla.### Load with Argilla\n\n\nTo load with Argilla, you'll just need to install Argilla as 'pip install argilla --upgrade' and then use the following code:### Load with 'datasets'\n\n\nTo load this dataset with 'datasets', you'll just need to install 'datasets' as 'pip install datasets --upgrade' and then use the following code:### Supported Tasks and Leaderboards\n\n\nThis dataset can contain multiple fields, questions and responses so it can be used for different NLP tasks, depending on the configuration. The dataset structure is described in the Dataset Structure section.\n\n\nThere are no leaderboards associated with this dataset.### Languages\n\n\nDataset Structure\n-----------------", "passage: ### Data in Argilla\n\n\nThe dataset is created in Argilla with: fields, questions, suggestions, metadata, vectors, and guidelines.\n\n\nThe fields are the dataset records themselves, for the moment just text fields are supported. These are the ones that will be used to provide responses to the questions.\n\n\n\nThe questions are the questions that will be asked to the annotators. 
They can be of different types, such as rating, text, label\\_selection, multi\\_label\\_selection, or ranking.\n\n\n\nThe suggestions are human or machine generated recommendations for each question to assist the annotator during the annotation process, so those are always linked to the existing questions, and named appending \"-suggestion\" and \"-suggestion-metadata\" to those, containing the value/s of the suggestion and its metadata, respectively. So on, the possible values are the same as in the table above, but the column name is appended with \"-suggestion\" and the metadata is appended with \"-suggestion-metadata\".\n\n\nThe metadata is a dictionary that can be used to provide additional information about the dataset record. This can be useful to provide additional context to the annotators, or to provide additional information about the dataset record itself. For example, you can use this to provide a link to the original source of the dataset record, or to provide additional information about the dataset record itself, such as the author, the date, or the source. The metadata is always optional, and can be potentially linked to the 'metadata\\_properties' defined in the dataset configuration file in 'URL'.\n\n\n\nThe guidelines, are optional as well, and are just a plain string that can be used to provide instructions to the annotators. 
Find those in the annotation guidelines section.", "passage: ### Data Instances\n\n\nAn example of a dataset instance in Argilla looks as follows:\n\n\njson\\n\\u003chtml\\u003e\\n\\u003cstyle\\u003e\\nbody {\\n font-family: Arial, sans-serif;\\n margin: 0;\\n padding: 0;\\n border-box;\\n}\\n\\nheader {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n}\\n\\n#video-container {\\n width: 70%;\\n margin: auto;\\n text-align: center;\\n}\\n\\n#video-player {\\n width: 100%;\\n height: 300px;\\n background: #f0f;\\n}\\n\\n#chef-tips {\\n margin: 40px;\\n}\\n\\n#chef-tips p {\\n color: #000;\\n line-height: 1.6;\\n}\\n\\nfooter {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n position: fixed;\\n width: 100%;\\n bottom: 0;\\n}\\n\\u003c/style\\u003e\\n\\u003cbody\\u003e\\n \\u003cheader\\u003e\\n \\u003ch1\\u003eInter\\u0027national\\u003c/h1\\u003e\\n \\u003c/header\\u003e\\n \\u003cmain\\u003e\\n \\u003csection id=\"video-container\"\\u003e\\n \\u003cvideo id=\"video-player\" src=\"#\" controls\\u003e\\u003c/video\\u003e\\n \\u003c/section\\u003e\\n \\u003csection id=\"chef-tips\"\\u003e\\n \\u003ch2\\u003eChef\\u0027s Tips\\u003c/h2\\u003e\\n \\u003cp\\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\\u003c/p\\u003e\\n \\u003c/section\\u003e\\n \\u003c/main\\u003e\\n \\u003cfooter\\u003e\\n \\u003cp\\u003e\\u00a9 2022 Inter\\u0027national. 
All rights reserved.\\u003c/p\\u003e\\n \\u003c/footer\\u003e\\n \\u003c/body\\u003e\\n\\u003c/html\\u003e\\njson\\n\\u003chtml\\u003e\\n\\u003cstyle\\u003e\\nbody {\\n font-family: Arial, sans-serif;\\n margin: 0;\\n padding: 0;\\n border-box;\\n}\\n\\nheader {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n}\\n\\n#video-container {\\n width: 70%;\\n margin: auto;\\n text-align: center;\\n}\\n\\n#video-player {\\n width: 100%;\\n height: 300px;\\n background: #f0f;\\n}\\n\\n#chef-tips {\\n margin: 40px;\\n}\\n\\n#chef-tips p {\\n color: #000;\\n line-height: 1.6;\\n}\\n\\nfooter {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n position: fixed;\\n width: 100%;\\n bottom: 0;\\n}\\n\\u003c/style\\u003e\\n\\u003cbody\\u003e\\n \\u003cheader\\u003e\\n \\u003ch1\\u003eInter\\u0027national\\u003c/h1\\u003e\\n \\u003c/header\\u003e\\n \\u003cmain\\u003e\\n \\u003csection id=\"video-container\"\\u003e\\n \\u003cvideo id=\"video-player\" src=\"#\" controls\\u003e\\u003c/video\\u003e\\n \\u003c/section\\u003e\\n \\u003csection id=\"chef-tips\"\\u003e\\n \\u003ch2\\u003eChef\\u0027s Tips\\u003c/h2\\u003e\\n \\u003cp\\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\\u003c/p\\u003e\\n \\u003c/section\\u003e\\n \\u003c/main\\u003e\\n \\u003cfooter\\u003e\\n \\u003cp\\u003e\\u00a9 2022 Inter\\u0027national. 
All rights reserved.\\u003c/p\\u003e\\n \\u003c/footer\\u003e\\n \\u003c/body\\u003e\\n\\u003c/html\\u003e\\n\n\n\nWhile the same record in HuggingFace 'datasets' looks as follows:\n\n\njson\\n\\u003chtml\\u003e\\n\\u003cstyle\\u003e\\nbody {\\n font-family: Arial, sans-serif;\\n margin: 0;\\n padding: 0;\\n border-box;\\n}\\n\\nheader {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n}\\n\\n#video-container {\\n width: 70%;\\n margin: auto;\\n text-align: center;\\n}\\n\\n#video-player {\\n width: 100%;\\n height: 300px;\\n background: #f0f;\\n}\\n\\n#chef-tips {\\n margin: 40px;\\n}\\n\\n#chef-tips p {\\n color: #000;\\n line-height: 1.6;\\n}\\n\\nfooter {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n position: fixed;\\n width: 100%;\\n bottom: 0;\\n}\\n\\u003c/style\\u003e\\n\\u003cbody\\u003e\\n \\u003cheader\\u003e\\n \\u003ch1\\u003eInter\\u0027national\\u003c/h1\\u003e\\n \\u003c/header\\u003e\\n \\u003cmain\\u003e\\n \\u003csection id=\"video-container\"\\u003e\\n \\u003cvideo id=\"video-player\" src=\"#\" controls\\u003e\\u003c/video\\u003e\\n \\u003c/section\\u003e\\n \\u003csection id=\"chef-tips\"\\u003e\\n \\u003ch2\\u003eChef\\u0027s Tips\\u003c/h2\\u003e\\n \\u003cp\\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\\u003c/p\\u003e\\n \\u003c/section\\u003e\\n \\u003c/main\\u003e\\n \\u003cfooter\\u003e\\n \\u003cp\\u003e\\u00a9 2022 Inter\\u0027national. 
All rights reserved.\\u003c/p\\u003e\\n \\u003c/footer\\u003e\\n \\u003c/body\\u003e\\n\\u003c/html\\u003e\\njson\\n\\u003chtml\\u003e\\n\\u003cstyle\\u003e\\nbody {\\n font-family: Arial, sans-serif;\\n margin: 0;\\n padding: 0;\\n border-box;\\n}\\n\\nheader {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n}\\n\\n#video-container {\\n width: 70%;\\n margin: auto;\\n text-align: center;\\n}\\n\\n#video-player {\\n width: 100%;\\n height: 300px;\\n background: #f0f;\\n}\\n\\n#chef-tips {\\n margin: 40px;\\n}\\n\\n#chef-tips p {\\n color: #000;\\n line-height: 1.6;\\n}\\n\\nfooter {\\n background: #ff0;\\n text-align: center;\\n padding: 20px;\\n position: fixed;\\n width: 100%;\\n bottom: 0;\\n}\\n\\u003c/style\\u003e\\n\\u003cbody\\u003e\\n \\u003cheader\\u003e\\n \\u003ch1\\u003eInter\\u0027national\\u003c/h1\\u003e\\n \\u003c/header\\u003e\\n \\u003cmain\\u003e\\n \\u003csection id=\"video-container\"\\u003e\\n \\u003cvideo id=\"video-player\" src=\"#\" controls\\u003e\\u003c/video\\u003e\\n \\u003c/section\\u003e\\n \\u003csection id=\"chef-tips\"\\u003e\\n \\u003ch2\\u003eChef\\u0027s Tips\\u003c/h2\\u003e\\n \\u003cp\\u003eEnjoy the tasty and healthy recipes shared by the best internationally recognized chefs. Discover the latest cooking trends and techniques.\\u003c/p\\u003e\\n \\u003c/section\\u003e\\n \\u003c/main\\u003e\\n \\u003cfooter\\u003e\\n \\u003cp\\u003e\\u00a9 2022 Inter\\u0027national. All rights reserved.\\u003c/p\\u003e\\n \\u003c/footer\\u003e\\n \\u003c/body\\u003e\\n\\u003c/html\\u003e\\n" ]
[ -0.04497823119163513, 0.19709767401218414, -0.00680975615978241, 0.04860823228955269, 0.11041233688592911, 0.013656742870807648, 0.09251327067613602, 0.08638522028923035, -0.00024382273841183633, 0.09873641282320023, 0.046819042414426804, 0.04500957950949669, 0.09061229974031448, 0.13405068218708038, 0.03733476996421814, -0.2367369383573532, -0.02460731565952301, -0.04598204419016838, -0.039468880742788315, 0.09646113961935043, 0.07850827276706696, -0.07552231848239899, 0.07139324396848679, -0.061167631298303604, -0.05341712757945061, 0.00039409598684869707, -0.029516855254769325, -0.017871996387839317, 0.10094074159860611, 0.07984363287687302, 0.058080777525901794, -0.006794164422899485, 0.05333966016769409, -0.20105616748332977, 0.02757624350488186, 0.0725628137588501, -0.0028698344249278307, 0.03729759901762009, 0.07501184195280075, -0.04803524538874626, 0.08177006989717484, -0.0484485886991024, -0.013697165995836258, 0.029290327802300453, -0.12268974632024765, -0.15336133539676666, -0.05103546380996704, -0.012278750538825989, 0.0920376405119896, 0.05014817416667938, -0.014826842583715916, -0.023326458409428596, 0.022653775289654732, 0.0318986214697361, 0.16193681955337524, -0.07585502415895462, -0.04722405970096588, 0.06760155409574509, 0.001273609697818756, 0.06830865889787674, -0.08452314138412476, -0.008065582253038883, -0.03517106920480728, 0.014107161201536655, 0.03027271293103695, -0.03747132420539856, -0.053663235157728195, 0.020547060295939445, -0.07724741101264954, -0.06450117379426956, 0.17695534229278564, 0.024323968216776848, -0.021374814212322235, -0.08542224019765854, -0.03891560435295105, 0.018491631373763084, -0.02400887757539749, 0.0146558852866292, 0.01190925668925047, -0.011456099338829517, 0.06237867474555969, -0.03427601233124733, -0.08045624941587448, -0.011664035730063915, 0.010860147885978222, 0.03733755648136139, -0.017302626743912697, -0.007398976478725672, 0.0016582304378971457, 0.058127570897340775, -0.13424943387508392, 
-0.12866070866584778, 0.023788845166563988, -0.032194096595048904, -0.11132801324129105, 0.018744533881545067, -0.03271904215216637, -0.12489692121744156, 0.057600438594818115, 0.18724863231182098, 0.058089520782232285, 0.057527292519807816, -0.047959595918655396, 0.027240315452218056, 0.04410656914114952, 0.0727372094988823, -0.0653875544667244, -0.12493426352739334, 0.0008921027183532715, 0.04248140752315521, 0.035429514944553375, -0.0487053245306015, -0.056816354393959045, 0.015301438979804516, -0.04051661491394043, -0.0005742018111050129, 0.11900034546852112, 0.041237201541662216, -0.06387991458177567, -0.017162486910820007, 0.13975228369235992, -0.14322157204151154, 0.01192480232566595, 0.026681089773774147, -0.01621960662305355, -0.039849188178777695, 0.011465062387287617, 0.015360685996711254, -0.00802039634436369, 0.08575581759214401, -0.03489019349217415, -0.03728219494223595, -0.06917735189199448, -0.05106012150645256, 0.04698437452316284, -0.08293643593788147, -0.0063765221275389194, -0.047550421208143234, -0.0436280257999897, -0.06855562329292297, 0.09254140406847, -0.07004036009311676, -0.02016499824821949, 0.0019194111227989197, 0.009981001727283001, 0.037203844636678696, 0.0013349466025829315, 0.02836947701871395, -0.01430837158113718, 0.0650297999382019, 0.021568844094872475, 0.07837244868278503, -0.015652982518076897, 0.043232668191194534, -0.04632634297013283, 0.03899094834923744, -0.21665501594543457, 0.05325004458427429, -0.04485606029629707, 0.00836963765323162, -0.11736661195755005, -0.0008465076680295169, -0.018642829731106758, -0.002394854323938489, 0.05063981935381889, 0.11446285247802734, -0.19730453193187714, -0.022285176441073418, 0.11393927782773972, -0.07351835817098618, -0.05579164996743202, 0.06561797112226486, 0.010715271346271038, 0.02531476318836212, -0.002528096316382289, 0.15522600710391998, 0.08766593784093857, -0.07353648543357849, -0.03739597275853157, -0.018485935404896736, 0.001749583869241178, 0.05276942253112793, 
0.09396984428167343, -0.0004815195279661566, 0.10461115837097168, 0.01028414536267519, -0.009294979274272919, 0.017822617664933205, 0.013406790792942047, -0.06631016731262207, 0.03868088126182556, -0.0010204626014456153, -0.03177861124277115, 0.047103460878133774, -0.10402315855026245, 0.032909635454416275, -0.0739123523235321, -0.09830236434936523, 0.031216567382216454, -0.058609116822481155, 0.027744794264435768, -0.045212019234895706, 0.08699679374694824, -0.025123877450823784, 0.025548338890075684, -0.13845513761043549, -0.08340508490800858, -0.02067018486559391, 0.05280792713165283, 0.061084646731615067, -0.0074544320814311504, 0.033447474241256714, 0.008138650096952915, -0.015732387080788612, 0.020596137270331383, 0.049774542450904846, -0.02184576354920864, 0.024256085976958275, -0.15821701288223267, -0.0247864481061697, -0.041869789361953735, 0.12288955599069595, -0.1731584072113037, -0.007502393331378698, 0.06980051100254059, 0.09654846787452698, 0.029910564422607422, -0.07637802511453629, 0.05195064842700958, -0.013132003135979176, -0.0322425477206707, -0.02526751160621643, 0.031243199482560158, 0.0008219170267693698, -0.008549387566745281, 0.05098157003521919, -0.15976502001285553, -0.029012998566031456, 0.050152942538261414, -0.020078426226973534, -0.044587794691324234, -0.10971611738204956, -0.026392890140414238, -0.036303117871284485, -0.02554931491613388, -0.011361892335116863, 0.07234609872102737, 0.0981087014079094, 0.04308571293950081, -0.03389936685562134, 0.013871061615645885, 0.031055251136422157, -0.021095821633934975, -0.047294240444898605, 0.047852773219347, 0.07006201148033142, -0.022540031000971794, 0.06243963912129402, 0.03984580561518669, 0.03724391385912895, 0.07753542810678482, 0.03353670984506607, -0.07692337781190872, -0.03486834466457367, 0.0037517063319683075, 0.03788143768906593, 0.07879360765218735, -0.027482986450195312, 0.03066178224980831, 0.046861570328474045, -0.017128728330135345, -0.012228690087795258, -0.06208603456616402, 
-0.004511128645390272, 0.027444278821349144, -0.027652224525809288, 0.0006928754155524075, -0.0024874231312423944, -0.04259423911571503, 0.06279115378856659, 0.029878342524170876, 0.003992170095443726, -0.04552102088928223, -0.04259074851870537, -0.06943272799253464, 0.13140615820884705, -0.09268581867218018, -0.22301192581653595, -0.1566522866487503, -0.17801912128925323, -0.02257443405687809, 0.04984607174992561, 0.020023642107844353, -0.03244314342737198, -0.038810085505247116, -0.04268943890929222, 0.010575562715530396, 0.032256364822387695, -0.06262831389904022, -0.06593651324510574, 0.01799975521862507, 0.05014200136065483, -0.053233545273542404, 0.02407844178378582, 0.05018946900963783, -0.022982969880104065, 0.08114707469940186, 0.052754033356904984, 0.16998769342899323, 0.08209653198719025, 0.03468549996614456, 0.007096370216459036, -0.012844450771808624, 0.17318858206272125, -0.09931077808141708, 0.11843017488718033, 0.1416230946779251, -0.04630172252655029, 0.09414651244878769, 0.21496538817882538, 0.029546111822128296, -0.0705912783741951, 0.009403340518474579, 0.038037825375795364, -0.06085655093193054, -0.16390499472618103, -0.039685748517513275, -0.044401805847883224, -0.016149483621120453, 0.10767003148794174, 0.022265290841460228, -0.057277437299489975, 0.05595196783542633, -0.03654571995139122, -0.007160841021686792, 0.01661480776965618, 0.06166188046336174, 0.15090221166610718, -0.017745070159435272, 0.0666552484035492, -0.02524312026798725, 0.004726991057395935, 0.0660632774233818, 0.10286913067102432, 0.11038509756326675, -0.07323060929775238, 0.10925676673650742, 0.0472588874399662, 0.013796095736324787, -0.03467817232012749, 0.06193318963050842, -0.03253859654068947, -0.002028706716373563, -0.04216789826750755, -0.07411182671785355, -0.08544093370437622, 0.06968443840742111, 0.008815552107989788, -0.12313810735940933, 0.009702883660793304, 0.027968959882855415, 0.002791191218420863, 0.09160369634628296, 0.0717187374830246, 
-0.17814354598522186, -0.000017075488358386792, 0.000568317249417305, 0.03770102560520172, -0.10112452507019043, -0.008190630003809929, 0.10051923245191574, -0.04296034947037697, 0.09259127825498581, -0.047171320766210556, 0.08183212578296661, -0.08933338522911072, 0.01700594834983349, 0.03418196737766266, 0.04904204607009888, -0.005871905479580164, 0.10168961435556412, -0.14633707702159882, 0.17396563291549683, 0.014769294299185276, -0.054393086582422256, -0.026416346430778503, 0.014300274662673473, -0.03751368448138237, -0.011067892424762249, 0.11885929107666016, 0.008328445255756378, -0.116727314889431, -0.12718890607357025, -0.07037622481584549, 0.03705671802163124, 0.08804766088724136, -0.03470250219106674, 0.07257527858018875, -0.023677879944443703, -0.027924591675400734, 0.0006825551390647888, 0.004967082757502794, -0.04109719768166542, -0.21874837577342987, 0.06672202795743942, 0.005182894412428141, 0.060728225857019424, -0.0075264256447553635, 0.008493694476783276, -0.0023924410343170166, 0.1196906566619873, -0.1001291275024414, -0.014782664366066456, -0.1123187318444252, 0.05752815306186676, 0.12141358107328415, -0.07809632271528244, 0.015510770492255688, -0.03419647365808487, 0.11554231494665146, -0.022846030071377754, -0.0164369884878397, 0.06349492818117142, -0.05405035987496376, -0.06426198780536652, -0.0666954517364502, 0.06009155511856079, 0.04305065795779228, 0.033966291695833206, -0.028348691761493683, 0.06572803109884262, 0.008700807578861713, -0.06378906220197678, 0.045174453407526016, 0.06274663656949997, 0.07571522146463394, 0.03208022937178612, -0.031526464968919754, 0.018857115879654884, -0.06563898921012878, -0.04787588492035866, 0.09727180749177933, 0.16813534498214722, -0.06961780786514282, 0.10980039834976196, 0.10394514352083206, -0.08593615144491196, -0.20574058592319489, -0.086654894053936, 0.04914545640349388, -0.016767937690019608, 0.033868562430143356, -0.2466905266046524, 0.07515320926904678, 0.015593715012073517, 
0.010001006536185741, 0.011835510842502117, -0.14019791781902313, -0.06691177934408188, 0.015883522108197212, 0.023639926686882973, -0.026277532801032066, -0.11839324980974197, -0.037212010473012924, -0.04452739655971527, -0.13540081679821014, 0.04823407530784607, 0.08875689655542374, 0.04085960611701012, -0.02593967132270336, 0.0359354130923748, 0.04430508241057396, -0.053369518369436264, 0.10774505138397217, -0.0018445203313603997, 0.0173924770206213, -0.07426788657903671, -0.029543712735176086, 0.018137672916054726, -0.04572350159287453, 0.05882807448506355, -0.023251285776495934, -0.010511618107557297, -0.11180035024881363, -0.0149906100705266, -0.03414955735206604, -0.010931531898677349, -0.03414512425661087, -0.037176165729761124, -0.029945755377411842, 0.03441910818219185, 0.07188460230827332, 0.005041474476456642, 0.042997151613235474, -0.09457498788833618, 0.04489701986312866, 0.1251015067100525, 0.06594874709844589, -0.06168347969651222, -0.11443272978067398, -0.025090934708714485, 0.028505489230155945, -0.013724853284657001, -0.13117298483848572, 0.0452275425195694, 0.11404844373464584, 0.006549777928739786, 0.08421065658330917, 0.00465007871389389, -0.09785521030426025, -0.005899609532207251, 0.10746419429779053, -0.10768500715494156, -0.07723923772573471, 0.0011669161031022668, 0.08131038397550583, -0.16408556699752808, -0.08933788537979126, 0.1037086769938469, 0.0422816127538681, 0.0003603976219892502, 0.020545853301882744, 0.09015103429555893, 0.028788210824131966, 0.09089908748865128, 0.05204001069068909, 0.02597036026418209, -0.06288143992424011, 0.03085334412753582, 0.13974304497241974, -0.1481815129518509, 0.028337515890598297, 0.06247839331626892, -0.034287355840206146, -0.0533871054649353, 0.0004687309265136719, -0.024743443354964256, -0.05227874591946602, -0.03628969192504883, -0.025091221556067467, 0.02549750544130802, 0.027214987203478813, 0.027541592717170715, 0.010325867682695389, 0.014021833427250385, 0.012532047927379608, 
-0.04254835844039917, -0.06598231196403503, 0.10132613033056259, 0.00842347089201212, 0.06142251566052437, -0.09351516515016556, -0.05612235888838768, -0.009356449358165264, 0.011598159559071064, -0.0013862870400771499, -0.04257035627961159, -0.06055639311671257, 0.0074278912506997585, -0.031206265091896057, 0.0309368297457695, -0.07486709207296371, -0.002399038290604949, -0.008825733326375484, -0.022012153640389442, -0.004258038476109505, -0.03965640068054199, -0.03148231282830238, -0.02100953459739685, -0.049820881336927414, 0.06749873608350754, -0.1690688580274582, -0.01146241556853056, 0.06374842673540115, -0.04491543769836426, 0.09259318560361862, -0.028347650542855263, -0.041565775871276855, -0.034515175968408585, -0.12588854134082794, -0.045089732855558395, -0.015369348227977753, 0.032930564135313034, -0.000015926236301311292, -0.12964661419391632, 0.028747878968715668, -0.015168055891990662, -0.05422693490982056, -0.010687301866710186, 0.08153694123029709, -0.10431315749883652, 0.1330816000699997, -0.02089747227728367, -0.0667344331741333, -0.05811179801821709, 0.05712850019335747, 0.010531213134527206, 0.02239173837006092, 0.09840190410614014, -0.041671376675367355, 0.07525970786809921, -0.1389404982328415, -0.00853350106626749, -0.006167890038341284, -0.008572605438530445, 0.039290159940719604, -0.04575389251112938, 0.016596535220742226, -0.0069528124295175076, 0.12005090713500977, 0.02459920197725296, 0.04986086115241051, 0.010984784923493862, -0.03909243643283844, -0.01991790160536766, -0.007765764836221933, -0.015976866707205772, 0.02380494214594364, -0.024158932268619537, 0.03713376447558403, 0.03280895948410034, 0.0009038187563419342, 0.07924602180719376, 0.07486097514629364, 0.08026934415102005, 0.1314931958913803, -0.05168907716870308, 0.028196224942803383, -0.11391174048185349, -0.10457617044448853, 0.11385346204042435, -0.055251624435186386, 0.07832973450422287, -0.032504886388778687, 0.07881494611501694, 0.1085413470864296, -0.12393458932638168, 
0.07904396206140518, -0.07090255618095398, -0.05728806182742119, -0.059229422360658646, -0.20502996444702148, -0.04268813133239746, -0.0952739343047142, -0.004530926700681448, -0.0862603560090065, 0.05972268059849739, 0.09597613662481308, -0.022287173196673393, 0.013279683887958527, 0.04738721251487732, 0.019481733441352844, -0.03789527714252472, -0.027850940823554993, 0.04820320010185242, 0.022324755787849426, 0.0677490308880806, 0.057416412979364395, 0.06024603918194771, 0.03249479830265045, 0.06420209258794785, 0.05201166868209839, 0.0965949073433876, 0.05150120332837105, -0.07358435541391373, -0.06554686278104782, -0.023172935470938683, 0.017156178131699562, 0.004231634084135294, 0.0838250145316124, 0.01778789423406124, 0.006437768694013357, 0.010264535434544086, 0.20288698375225067, -0.014675878919661045, -0.0792323425412178, -0.09941328316926956, 0.1514410525560379, -0.02417181245982647, 0.0154282720759511, -0.05766865611076355, -0.09473305940628052, 0.01359336543828249, 0.1527429074048996, 0.1661573201417923, -0.040229860693216324, -0.02040422149002552, 0.0554540753364563, 0.028813974931836128, 0.007338680326938629, 0.08424314856529236, 0.06387138366699219, 0.2182917594909668, -0.02458377182483673, -0.027471842244267464, -0.0637890100479126, -0.011148437857627869, -0.007269205991178751, 0.021680081263184547, -0.0036366384010761976, 0.004290603566914797, -0.026143528521060944, 0.11194109916687012, 0.0019241484114900231, -0.1760694831609726, -0.030128061771392822, -0.1124805212020874, -0.10076648741960526, -0.018639659509062767, 0.06987381726503372, 0.008771750144660473, 0.032157186418771744, 0.010218454524874687, -0.0583060085773468, 0.14906297624111176, 0.030276894569396973, -0.05965592339634895, 0.001898050308227539, 0.036315739154815674, -0.1220802590250969, 0.17285896837711334, -0.032392989844083786, 0.032028812915086746, 0.09352812170982361, -0.007378552109003067, -0.10014697909355164, 0.003564017591997981, 0.06739979982376099, -0.0036151024978607893, 
0.03273085132241249, 0.13131652772426605, -0.016160905361175537, 0.11370205134153366, 0.07679995149374008, 0.0056158252991735935, 0.058988556265830994, -0.01119760051369667, 0.015272351913154125, -0.015825724229216576, 0.05362315475940704, -0.08256282657384872, 0.11956480890512466, 0.09958227723836899, -0.024768933653831482, -0.010975277982652187, -0.06181373819708824, -0.004100290592759848, 0.0123048797249794, 0.1600101739168167, -0.02066040225327015, -0.09619661420583725, -0.018799729645252228, -0.037329573184251785, 0.03476552665233612, -0.16017644107341766, -0.04684700071811676, 0.07909327000379562, -0.019396787509322166, -0.02036507986485958, 0.11873999983072281, 0.007700534071773291, 0.04116547480225563, -0.08367842435836792, -0.032873351126909256, 0.00562170147895813, 0.10714295506477356, -0.11626392602920532, -0.04720263183116913 ]
70e871eca473336266333c91583f846af1d19812
# Dataset Card for "alpaca_skewexp_minlength_merged" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Asap7772/alpaca_skewexp_minlength_merged
[ "region:us" ]
2024-01-25T08:15:25+00:00
{"dataset_info": {"features": [{"name": "output", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "alpaca_text", "dtype": "string"}, {"name": "prompt", "dtype": "string"}, {"name": "alpaca_prompt", "dtype": "string"}, {"name": "y_ref", "dtype": "string"}, {"name": "y_1", "dtype": "string"}, {"name": "y_2", "dtype": "string"}, {"name": "y_w", "dtype": "string"}, {"name": "y_w_alpaca", "dtype": "string"}, {"name": "y_l", "dtype": "string"}, {"name": "y_l_alpaca", "dtype": "string"}, {"name": "y_w_score", "dtype": "float64"}, {"name": "y_l_score", "dtype": "float64"}, {"name": "score_diff", "dtype": "float64"}], "splits": [{"name": "train", "num_bytes": 145325132.0, "num_examples": 23000}, {"name": "test", "num_bytes": 12612158, "num_examples": 2000}], "download_size": 52601537, "dataset_size": 157937290.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]}
2024-01-25T08:15:34+00:00
[]
[]
TAGS #region-us
# Dataset Card for "alpaca_skewexp_minlength_merged" More Information needed
[ "# Dataset Card for \"alpaca_skewexp_minlength_merged\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"alpaca_skewexp_minlength_merged\"\n\nMore Information needed" ]
[ 6, 23 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"alpaca_skewexp_minlength_merged\"\n\nMore Information needed" ]
[ -0.09367068856954575, -0.018860485404729843, -0.003361755982041359, 0.033978838473558426, 0.06463555246591568, 0.07070662826299667, 0.11270374059677124, 0.0933670848608017, 0.0572027713060379, 0.0033923208247870207, 0.07494252920150757, -0.011440228670835495, 0.08269903808832169, 0.18926078081130981, -0.06964363902807236, -0.12588511407375336, 0.01714455522596836, 0.07802494615316391, -0.0948895588517189, 0.034748755395412445, 0.0523667186498642, -0.0924178808927536, 0.07395608723163605, -0.08969757705926895, -0.1916477531194687, 0.05885418504476547, -0.044381845742464066, -0.08854048699140549, 0.08470072597265244, 0.036763835698366165, 0.1594083458185196, -0.05957280844449997, -0.01390181016176939, -0.09664078056812286, 0.03902576491236687, -0.002335797995328903, 0.019054710865020752, 0.0682724118232727, 0.11789638549089432, 0.006567664444446564, -0.09712830930948257, -0.036797624081373215, 0.02546461671590805, -0.010328643023967743, -0.12234672158956528, -0.12443076074123383, -0.10455146431922913, -0.023608261719346046, 0.11765307933092117, -0.04351847246289253, 0.06165628135204315, 0.07633942365646362, -0.050170354545116425, 0.012334988452494144, 0.08401193469762802, -0.09605827927589417, 0.0037881641183048487, 0.13597677648067474, -0.04876305162906647, 0.082102932035923, -0.021074311807751656, 0.05416308715939522, 0.07282885164022446, -0.03524130582809448, -0.0005316485767252743, -0.06761936843395233, -0.15216907858848572, 0.07740575820207596, -0.021308232098817825, -0.05279504507780075, 0.1578139066696167, 0.0158520694822073, 0.05764303728938103, -0.04095810651779175, -0.051045261323451996, -0.012577692978084087, -0.034535136073827744, 0.1235206350684166, 0.05442165210843086, -0.0018405242590233684, 0.0648764818906784, -0.021812571212649345, -0.08552967011928558, -0.07892435044050217, -0.24567480385303497, 0.14012153446674347, -0.018178442493081093, 0.15018436312675476, -0.16658097505569458, -0.028599269688129425, -0.06646745651960373, -0.02468234859406948, 
0.023145824670791626, -0.06040439382195473, -0.020588265731930733, 0.015865782275795937, -0.04539714753627777, -0.08934000879526138, 0.09281962364912033, 0.08351940661668777, 0.0776330754160881, 0.03865503892302513, 0.03665025532245636, 0.10513781011104584, 0.13454848527908325, -0.06602538377046585, -0.038569170981645584, -0.08347651362419128, -0.012129882350564003, -0.08722919970750809, 0.030114322900772095, -0.08149752765893936, -0.08688642829656601, -0.03787178918719292, -0.16134855151176453, 0.06294571608304977, 0.0165781881660223, -0.11440100520849228, -0.06867372989654541, -0.053391072899103165, 0.10667502880096436, -0.09302325546741486, 0.04888620600104332, -0.06669673323631287, -0.020133811980485916, 0.11254555732011795, -0.06068738177418709, -0.002261277986690402, 0.07400886714458466, 0.12147437036037445, -0.07789655029773712, -0.024210331961512566, -0.047674138098955154, -0.04943077266216278, 0.08823467046022415, -0.17502139508724213, 0.057614706456661224, -0.10930543392896652, -0.11215835064649582, 0.044097576290369034, 0.0032405417878180742, -0.07168762385845184, 0.09758901596069336, 0.019961057230830193, 0.1533248871564865, 0.010598019696772099, -0.08104033023118973, 0.15190427005290985, -0.054916977882385254, -0.014620288275182247, -0.02588658593595028, 0.14994509518146515, -0.23272167146205902, 0.028795432299375534, -0.0892258882522583, 0.06517097353935242, -0.053254544734954834, 0.06765857338905334, -0.07653534412384033, 0.12056152522563934, -0.07929681986570358, 0.00704076886177063, -0.11590315401554108, 0.018041538074612617, 0.026142621412873268, 0.031106168404221535, -0.16429413855075836, -0.06796284765005112, 0.15567649900913239, -0.10640226304531097, -0.12681268155574799, 0.06703848391771317, -0.007962644100189209, -0.028375595808029175, 0.011920811608433723, 0.2877834141254425, 0.0199262797832489, -0.025165319442749023, 0.043145641684532166, 0.13920313119888306, -0.09529338777065277, -0.25270983576774597, 0.08871833235025406, 
-0.018780432641506195, -0.09622711688280106, 0.01973172090947628, 0.1764075756072998, 0.12205720692873001, -0.051937852054834366, -0.07537076622247696, -0.042235687375068665, -0.11591947823762894, 0.0068383305333554745, -0.008590055629611015, 0.028621448203921318, -0.070033960044384, 0.18549609184265137, 0.11107456684112549, 0.06899711489677429, -0.009417803026735783, 0.009477420710027218, 0.0441022627055645, 0.10426722466945648, -0.15100674331188202, -0.014535611495375633, -0.12556523084640503, -0.1283481866121292, -0.04229099676012993, -0.06591902673244476, 0.028690563514828682, 0.07806818932294846, 0.04332080855965614, -0.038607075810432434, -0.0035392995923757553, 0.09717077016830444, 0.08102890104055405, 0.029016170650720596, -0.06381124258041382, 0.021449698135256767, 0.035345833748579025, -0.052003324031829834, -0.0591031052172184, 0.02797774225473404, -0.04848382622003555, 0.0052261874079704285, 0.0972084254026413, -0.0027262333314865828, 0.04242165759205818, 0.06647169589996338, 0.038983870297670364, 0.0009315619245171547, -0.016748184338212013, 0.037715718150138855, -0.04697573930025101, -0.030860604718327522, 0.08628585189580917, -0.0374491885304451, 0.13958972692489624, 0.1113685667514801, -0.06363976001739502, 0.06025365740060806, -0.18312637507915497, 0.017178447917103767, -0.04400016367435455, -0.10201555490493774, 0.024216841906309128, -0.015559633262455463, -0.02458478882908821, 0.10275382548570633, -0.07759425044059753, 0.03211435303092003, 0.02840576134622097, -0.06388632953166962, -0.08619905263185501, 0.03985441476106644, 0.058474306017160416, -0.24336457252502441, 0.1500190794467926, 0.2586459815502167, 0.11461187154054642, 0.13775788247585297, -0.041604381054639816, -0.10420026630163193, 0.02742815762758255, -0.1014685407280922, -0.0728950947523117, 0.11122920364141464, 0.042476437985897064, 0.008574402891099453, 0.11869646608829498, 0.0194233525544405, 0.07303451001644135, -0.1014283075928688, -0.05245902016758919, 0.017340514808893204, 
0.0398479662835598, -0.08666589111089706, 0.02785809524357319, 0.02186310850083828, 0.07174106687307358, 0.04844876378774643, 0.0021109944209456444, 0.06191777437925339, 0.008515533991158009, 0.022315382957458496, 0.09106117486953735, -0.15834766626358032, -0.3302479088306427, -0.1731371432542801, -0.10649938136339188, 0.004751562140882015, 0.000913571915589273, -0.010253063403069973, -0.09813957661390305, -0.06353964656591415, 0.013858139514923096, 0.025055859237909317, -0.07818920910358429, 0.033671993762254715, 0.013916030526161194, 0.032811980694532394, -0.07577379792928696, -0.10246115177869797, 0.027527038007974625, -0.019107073545455933, 0.20527541637420654, 0.11995291709899902, -0.07181413471698761, 0.11718770116567612, 0.13038291037082672, -0.052790045738220215, 0.023869873955845833, -0.033710405230522156, 0.03781697526574135, -0.03427409008145332, -0.01037219725549221, 0.13473625481128693, -0.027442405000329018, 0.031131109222769737, 0.04099418967962265, 0.07550229132175446, -0.09036586433649063, -0.04282175004482269, -0.003780897706747055, -0.1812690794467926, -0.20866456627845764, -0.13705386221408844, -0.09148784726858139, 0.14395897090435028, 0.1348525434732437, 0.01498455461114645, -0.12351212650537491, 0.08422239869832993, 0.09586779773235321, 0.033345531672239304, -0.1566205620765686, -0.03821830824017525, 0.12807723879814148, -0.00866401381790638, 0.04757725074887276, -0.14260298013687134, 0.0013427447993308306, 0.15741726756095886, 0.24997621774673462, 0.13312435150146484, 0.06345672160387039, 0.10110615938901901, -0.01165208499878645, 0.06324061006307602, 0.11119787395000458, 0.12900717556476593, 0.027843063697218895, -0.02774340845644474, -0.006360647268593311, 0.042941298335790634, -0.013777918182313442, 0.013201482594013214, 0.06366428732872009, -0.1186598390340805, 0.021287109702825546, -0.062039151787757874, 0.007877098396420479, 0.034012965857982635, 0.10376255214214325, -0.16617342829704285, 0.011079448275268078, 0.020912157371640205, 
0.07367616146802902, -0.10933563113212585, 0.042529866099357605, 0.13222555816173553, -0.03189793601632118, 0.124896340072155, -0.01880680024623871, 0.1264026015996933, -0.03059273026883602, -0.030867496505379677, -0.05924675241112709, -0.044217340648174286, 0.008305775001645088, 0.09300484508275986, -0.13147316873073578, 0.18127012252807617, 0.031225500628352165, -0.13905002176761627, -0.031038327142596245, -0.013310417532920837, -0.020186828449368477, 0.02086949162185192, 0.008809690363705158, 0.030405724421143532, -0.11692659556865692, -0.1975468099117279, -0.18309620022773743, -0.041095905005931854, 0.06085747107863426, 0.06242813542485237, -0.10011980682611465, 0.016424646601080894, 0.0005767918773926795, -0.053416311740875244, -0.13542385399341583, 0.09177134186029434, -0.13857154548168182, 0.014325723983347416, 0.09634647518396378, -0.18966709077358246, 0.03666919469833374, -0.008550274185836315, -0.16673606634140015, 0.10451366752386093, 0.0841933935880661, -0.0667700320482254, -0.09406343102455139, -0.006453363690525293, 0.16388382017612457, -0.008112755604088306, 0.10860181599855423, 0.0018756951903924346, 0.025043733417987823, -0.059182051569223404, -0.19275988638401031, 0.1284521520137787, -0.07938870042562485, 0.09748610854148865, -0.0768430158495903, 0.06050806865096092, -0.10390498489141464, 0.04358828812837601, -0.013814500533044338, 0.028761059045791626, -0.1225905567407608, -0.048268627375364304, 0.07892125844955444, -0.005874864291399717, 0.12904635071754456, 0.21342778205871582, 0.039446551352739334, 0.011673294007778168, 0.10740367323160172, -0.06273474544286728, 0.19731956720352173, 0.044250309467315674, -0.04997485131025314, 0.20508535206317902, 0.1080939844250679, 0.014297954738140106, -0.26419657468795776, -0.011551903560757637, -0.10396528989076614, 0.03825218975543976, 0.037286948412656784, -0.11597277969121933, 0.09367204457521439, 0.19405989348888397, -0.00820903666317463, 0.1887485682964325, -0.213265061378479, -0.051189716905355453, 
0.12769854068756104, 0.021606789901852608, 0.27908921241760254, -0.05717731639742851, -0.062261033803224564, -0.11165018379688263, -0.16915248334407806, 0.12702608108520508, -0.11086085438728333, 0.03637249395251274, -0.0327899307012558, 0.10328325629234314, -0.0012408873299136758, -0.04093935340642929, 0.1897517889738083, 0.010245447978377342, 0.06752090156078339, -0.08630459755659103, -0.01552907470613718, 0.07188273966312408, -0.0725119337439537, 0.07160026580095291, -0.0334731861948967, 0.0022048603277653456, -0.1112508699297905, -0.012670958414673805, 0.05988480523228645, -0.012264088727533817, 0.09619488567113876, -0.04646396264433861, -0.10918580740690231, -0.043485142290592194, -0.1026519387960434, -0.009200888685882092, 0.1727725863456726, 0.009378228336572647, -0.03844199329614639, 0.10762246698141098, -0.009386628866195679, -0.17714270949363708, -0.09457135945558548, -0.09002676606178284, -0.08295273780822754, 0.07358641177415848, -0.20811793208122253, 0.04090560972690582, 0.10024596005678177, -0.025632347911596298, -0.026593126356601715, 0.03815891966223717, 0.024231743067502975, 0.04360656812787056, 0.16451789438724518, -0.07362189888954163, 0.0048218644224107265, 0.0851447731256485, -0.03571173548698425, 0.0186748206615448, -0.0012088149087503552, 0.04051955044269562, 0.0837741270661354, -0.029810914769768715, 0.004551378078758717, 0.03854333236813545, -0.08514221757650375, 0.12450115382671356, 0.0667499303817749, 0.028647180646657944, -0.16990023851394653, 0.19570602476596832, 0.06664999574422836, -0.15463994443416595, -0.03693283349275589, 0.06274405866861343, -0.05682595074176788, -0.09037847816944122, 0.04248519241809845, 0.13533718883991241, -0.16666072607040405, -0.09272491931915283, -0.022927353158593178, -0.09062910079956055, 0.028771955519914627, 0.03388138487935066, 0.0432596392929554, 0.06595185399055481, 0.008169923909008503, -0.06608947366476059, 0.0029685425106436014, -0.02454541064798832, -0.032637037336826324, 0.10227039456367493, 
-0.02694752626121044, -0.036417122930288315, -0.05081512778997421, 0.1763538420200348, -0.04845109581947327, -0.009169223718345165, -0.07044681161642075, 0.031813547015190125, -0.1865425854921341, 0.029022768139839172, -0.00528956251218915, 0.008565534837543964, -0.014701265841722488, 0.05783352628350258, -0.0041289799846708775, -0.048196226358413696, -0.07733721286058426, 0.045520320534706116, -0.013003307394683361, 0.009011562913656235, -0.03270787000656128, -0.06549099087715149, 0.08451575040817261, 0.06148222088813782, 0.05532936006784439, 0.06130652129650116, 0.03769662603735924, -0.011924810707569122, 0.016680000349879265, -0.13154809176921844, 0.10833198577165604, 0.07975149154663086, 0.08320585638284683, 0.03937120735645294, -0.0008728700340725482, 0.05840323865413666, -0.004996690433472395, 0.05353224650025368, -0.052515678107738495, -0.07284724712371826, -0.04375523701310158, -0.15824002027511597, -0.13020673394203186, 0.009319089353084564, -0.10061799734830856, 0.11486569792032242, 0.07182268053293228, -0.024415483698248863, 0.034447550773620605, 0.03168245032429695, -0.03882986679673195, -0.04008078947663307, -0.03969791159033775, -0.15873445570468903, 0.0028649817686527967, -0.01436679344624281, 0.03452238440513611, -0.028193779289722443, 0.3921363353729248, 0.07088282704353333, -0.12925083935260773, -0.017757391557097435, 0.08785567432641983, -0.019748853519558907, 0.04088116064667702, 0.3549073338508606, 0.09306326508522034, -0.033548496663570404, -0.05604366213083267, 0.11259035021066666, 0.05854535475373268, 0.19926169514656067, 0.06630844622850418, 0.11778607219457626, 0.02347205951809883, 0.04655248299241066, 0.04607990011572838, -0.06918113678693771, -0.046781331300735474, -0.05878248065710068, -0.04689014330506325, 0.04348549246788025, 0.05940103158354759, -0.023284289985895157, 0.03240995109081268, -0.14746442437171936, 0.0563247986137867, -0.009184702299535275, -0.04777156934142113, -0.06053989380598068, -0.031908098608255386, 
-0.05059121921658516, -0.07057389616966248, -0.03625313192605972, -0.023878809064626694, 0.004753666464239359, 0.17090310156345367, 0.021111108362674713, 0.04200976714491844, 0.09967303276062012, -0.10029787570238113, 0.012489480897784233, 0.019128093495965004, 0.044101912528276443, 0.0031238917727023363, -0.005277189891785383, -0.07119876891374588, 0.038667481392621994, -0.045605748891830444, -0.048142850399017334, -0.04167526215314865, 0.11360412836074829, 0.029031887650489807, -0.09811435639858246, -0.07368763536214828, -0.06843718141317368, 0.0367397777736187, -0.005582838319242001, -0.01574093848466873, 0.06650374829769135, 0.0612819641828537, 0.016759587451815605, 0.09685415029525757, -0.018709104508161545, 0.018822869285941124, -0.004139820113778114, -0.02402898296713829, -0.07685621827840805, 0.09899096935987473, -0.054236043244600296, -0.046859145164489746, -0.058605629950761795, 0.1583234816789627, 0.2220786064863205, -0.08878811448812485, -0.016037732362747192, 0.00279382336884737, 0.042646124958992004, 0.047798749059438705, 0.17002147436141968, 0.004840949550271034, 0.05552583187818527, -0.030630765482783318, -0.11940347403287888, 0.014895862899720669, -0.040706146508455276, -0.10816457122564316, 0.04348653554916382, 0.008181523531675339, -0.06408163160085678, -0.07320040464401245, 0.09455260634422302, -0.019900113344192505, 0.2095669060945511, 0.13744007050991058, -0.17469266057014465, -0.1124052032828331, -0.010290918871760368, 0.07688548415899277, -0.007336854934692383, 0.021029818803071976, -0.08273078501224518, 0.015841307118535042, 0.027217470109462738, 0.00017731462139636278, -0.29652559757232666, -0.18518321216106415, 0.02632482349872589, -0.002470226027071476, -0.02664739079773426, -0.005181715823709965, 0.06402364373207092, 0.03487275540828705, 0.04488057270646095, -0.0652264803647995, -0.0028408479411154985, -0.011020185425877571, 0.08129604905843735, 0.011734331026673317, 0.0071055400185287, -0.06218941882252693, -0.11158625036478043, 
0.07941031455993652, -0.013354792259633541, -0.07339426875114441, 0.024405542761087418, 0.031168721616268158, 0.029278920963406563, -0.01960103213787079, -0.036891285330057144, 0.1491769403219223, 0.031298644840717316, -0.012511815875768661, -0.05390021950006485, 0.030622974038124084, 0.03691306710243225, 0.0050874934531748295, -0.03765423223376274, -0.06603699177503586, 0.00038600678090006113, -0.12676526606082916, -0.017693011090159416, 0.02432169020175934, -0.1531611680984497, -0.001655602129176259, -0.13449181616306305, 0.011409399099647999, -0.020156117156147957, 0.05711129680275917, 0.11618971079587936, 0.011712015606462955, -0.04072156921029091, -0.1212877482175827, 0.07378765940666199, -0.008035934530198574, 0.02298308163881302, -0.07523909211158752 ]
a199e02b65c6696c467501f01beeb8de9c719916
Datasets comprising 10,000 instances each used for uncertainty quantification in LLMs. 1. mmlu_10k is used for question answering. 2. cosmosqa_10k is used for reading comprehension. 3. hellaswag_10k is used for commonsense inference. 4. halu_dialogue is used for dialogue response selection. 5. halu_summarization is used for document summarization. For more details on how these datasets are utilized, check out our github repo: https://github.com/smartyfh/LLM-Uncertainty-Bench/tree/main
ErikYip/LLM-Uncertainty-Bench
[ "region:us" ]
2024-01-25T08:16:31+00:00
{}
2024-01-25T08:26:07+00:00
[]
[]
TAGS #region-us
Datasets comprising 10,000 instances each used for uncertainty quantification in LLMs. 1. mmlu_10k is used for question answering. 2. cosmosqa_10k is used for reading comprehension. 3. hellaswag_10k is used for commonsense inference. 4. halu_dialogue is used for dialogue response selection. 5. halu_summarization is used for document summarization. For more details on how these datasets are utilized, check out our github repo: URL
[]
[ "TAGS\n#region-us \n" ]
[ 6 ]
[ "passage: TAGS\n#region-us \n" ]
[ 0.024608636274933815, -0.026205500587821007, -0.009666500613093376, -0.10395516455173492, 0.08638657629489899, 0.059816278517246246, 0.01882290467619896, 0.020661840215325356, 0.23975107073783875, -0.005599027033895254, 0.1219947561621666, 0.0015615287702530622, -0.037353623658418655, 0.03733762726187706, -0.0035912662278860807, -0.17583473026752472, 0.03876631706953049, -0.018274923786520958, 0.01843859627842903, 0.026470553129911423, -0.07776834815740585, -0.07564429938793182, 0.015296397730708122, -0.10247814655303955, -0.083692267537117, 0.11002834886312485, 0.031466204673051834, -0.019670886918902397, 0.10779199749231339, -0.04243955761194229, 0.18699054419994354, -0.011512263678014278, -0.11213519424200058, -0.2536850869655609, 0.021806683391332626, -0.01765260472893715, -0.08747660368680954, 0.01506110467016697, 0.0665089413523674, -0.09014441072940826, -0.0588928684592247, 0.0795099288225174, -0.01132340170443058, 0.04246443510055542, -0.27593839168548584, -0.12684126198291779, -0.05297930911183357, -0.1421966552734375, 0.08651168644428253, 0.04035491496324539, 0.008764253929257393, 0.15506891906261444, -0.20897391438484192, 0.004104613792151213, 0.08255259692668915, -0.2538507878780365, 0.05591634660959244, 0.17671173810958862, 0.03623908758163452, 0.18037272989749908, 0.0060391901060938835, 0.11029672622680664, 0.0716743916273117, -0.024263937026262283, -0.17590197920799255, -0.08127854019403458, -0.04696211963891983, 0.16642488539218903, -0.06727185100317001, -0.14248386025428772, 0.34701237082481384, 0.00015008423360995948, 0.009657775051891804, 0.16921205818653107, -0.059524230659008026, -0.09972117841243744, 0.07259953022003174, 0.016484731808304787, 0.018492350354790688, 0.1471305936574936, 0.16307872533798218, -0.0458691343665123, -0.13837823271751404, -0.018630273640155792, -0.22798998653888702, 0.17510560154914856, -0.03248048573732376, 0.13137903809547424, -0.27447956800460815, 0.01684025302529335, -0.2570667266845703, 0.0032130838371813297, 
0.04178816080093384, -0.06004921346902847, -0.0226522795855999, -0.013265985064208508, -0.08018817007541656, 0.004899587947875261, 0.06192673370242119, 0.1266920566558838, -0.06128726154565811, 0.06128238886594772, -0.09319206327199936, 0.141696035861969, 0.07166698575019836, 0.07868369668722153, 0.13037432730197906, 0.041205424815416336, -0.07187089323997498, -0.21872246265411377, -0.0026476888451725245, -0.06275863200426102, -0.09502086788415909, -0.0020165652967989445, -0.11606067419052124, 0.17244569957256317, -0.030802514404058456, -0.09825427830219269, -0.11208184063434601, 0.09148659557104111, -0.032992321997880936, -0.03437839448451996, -0.03552987426519394, -0.020977836102247238, 0.019381176680326462, 0.04704452306032181, -0.1548958420753479, -0.005131472367793322, 0.07039852440357208, 0.11502562463283539, -0.1346137970685959, -0.003783059772104025, -0.07908964157104492, 0.03039063885807991, 0.07654735445976257, -0.16510222852230072, 0.03158547356724739, -0.1124754324555397, -0.07531405985355377, 0.002912673633545637, -0.015710093080997467, -0.016202643513679504, 0.166526660323143, -0.0020451415330171585, 0.0714716836810112, -0.026345307007431984, -0.05890209600329399, -0.11243434250354767, -0.08489254862070084, 0.05390460044145584, 0.03670717030763626, 0.03266148269176483, -0.2193479984998703, 0.014805203303694725, -0.12762966752052307, 0.1360815018415451, -0.10566820204257965, -0.04705966264009476, -0.022842247039079666, 0.20562705397605896, 0.037286072969436646, 0.08762791007757187, -0.22171171009540558, 0.039756543934345245, -0.05404696613550186, 0.18480908870697021, -0.1502426266670227, -0.0799463614821434, 0.20813211798667908, -0.07964949309825897, -0.10115210711956024, 0.021235812455415726, 0.020391687750816345, 0.026287272572517395, 0.0766737088561058, 0.4564172327518463, -0.09766800701618195, -0.09146861732006073, 0.10178250074386597, 0.17055274546146393, -0.12427149713039398, -0.1827561855316162, 0.06446871906518936, -0.16666454076766968, 
-0.1973118633031845, 0.0018917324487119913, 0.09222044050693512, 0.038269978016614914, -0.07875611633062363, -0.020746968686580658, 0.06325206160545349, -0.0007678253459744155, 0.09095914661884308, 0.03755716234445572, 0.09034032374620438, -0.08716782182455063, 0.11115926504135132, -0.05017651244997978, 0.004037132486701012, 0.1343354731798172, 0.027325427159667015, -0.03223329409956932, 0.08694463223218918, -0.0485352948307991, 0.05295134335756302, -0.1662379503250122, -0.15068690478801727, 0.03398871049284935, 0.06283251196146011, 0.03186952322721481, 0.1280253529548645, 0.08141885697841644, -0.10732853412628174, 0.022690722718834877, -0.004228927195072174, 0.058398615568876266, 0.03891623765230179, 0.006107209715992212, 0.008764320984482765, 0.0961301177740097, -0.10607069730758667, -0.13589619100093842, -0.07336436957120895, -0.014715781435370445, 0.14371353387832642, -0.0302802175283432, 0.07690227776765823, -0.004240254405885935, 0.00013200697139836848, 0.06930823624134064, 0.08137880265712738, 0.016412746161222458, 0.08971183747053146, -0.05237193778157234, -0.05160155147314072, 0.10863113403320312, -0.13533565402030945, 0.17837053537368774, 0.14053137600421906, -0.20532016456127167, 0.029453208670020103, -0.06838275492191315, 0.03670361638069153, -0.008162540383636951, 0.0975119024515152, -0.08272241055965424, -0.02106042578816414, 0.013134466484189034, 0.0052274600602686405, -0.013007243163883686, 0.017682146281003952, -0.07295988500118256, -0.07787393033504486, -0.10233919322490692, 0.08436838537454605, 0.11562882363796234, -0.10282530635595322, 0.14214380085468292, 0.4384984076023102, 0.11495281755924225, 0.21582984924316406, -0.09581480920314789, -0.0412987545132637, 0.007486371789127588, 0.0001535322517156601, -0.04476691037416458, 0.08031861484050751, -0.15973517298698425, -0.038901735097169876, 0.027348900213837624, 0.07128690183162689, 0.11475157737731934, -0.14959022402763367, -0.09639324247837067, -0.00793045200407505, 0.0022841424215584993, 
-0.1249532699584961, 0.023905446752905846, -0.03974650055170059, 0.04015624523162842, 0.07232289016246796, -0.021535737439990044, 0.13939237594604492, -0.04166141897439957, -0.0639561116695404, 0.07585346698760986, -0.2017085999250412, -0.23179671168327332, -0.12309670448303223, -0.14680525660514832, 0.04366797208786011, 0.05154111236333847, 0.01726446859538555, -0.17635835707187653, -0.015074856579303741, 0.07706750929355621, 0.07820965349674225, -0.20886357128620148, -0.022814949974417686, -0.004290030337870121, 0.0895976573228836, -0.10227091610431671, -0.0017130117630586028, -0.04419664293527603, -0.10150232166051865, 0.0017003051470965147, 0.07279510796070099, -0.137485533952713, 0.13807645440101624, 0.21589438617229462, 0.07225540280342102, 0.07359948754310608, -0.019093448296189308, 0.09936179965734482, -0.10856141895055771, -0.16549113392829895, 0.08348225057125092, -0.06234746053814888, 0.047262318432331085, 0.17534415423870087, 0.03307317942380905, -0.13904969394207, -0.015682822093367577, -0.0402069091796875, -0.15603256225585938, -0.238995760679245, -0.09178274869918823, -0.1182505264878273, 0.16442428529262543, 0.0009358620154671371, 0.06651917099952698, 0.08258313685655594, -0.022042419761419296, 0.16447891294956207, -0.07379321753978729, -0.07578866183757782, -0.006978808436542749, 0.12375060468912125, -0.056660156697034836, -0.03080669604241848, -0.10566964000463486, -0.008295975625514984, 0.1151021271944046, 0.15304014086723328, 0.12214863300323486, 0.2957419455051422, 0.08268889784812927, 0.026645636186003685, 0.08958091586828232, 0.17622539401054382, 0.09495089203119278, 0.07838419824838638, -0.045413073152303696, -0.014814783819019794, 0.014317171648144722, -0.04022889584302902, 0.010141594335436821, 0.14683100581169128, -0.2679629921913147, -0.006678564939647913, -0.2710230350494385, 0.0965198427438736, -0.10913380235433578, 0.11837165057659149, -0.01015760749578476, 0.10194015502929688, 0.11082887649536133, 0.03233652561903, 
-0.03858073800802231, 0.16613617539405823, 0.08450309932231903, -0.11277695000171661, 0.001758623169735074, 0.03737903758883476, 0.09715615212917328, -0.02818971499800682, 0.12721189856529236, -0.11048974841833115, -0.1464834064245224, 0.013753619976341724, 0.07152791321277618, -0.15373679995536804, 0.3138748109340668, 0.012069208547472954, -0.13481520116329193, -0.01481647603213787, -0.09957809001207352, -0.006440147757530212, 0.1254177987575531, 0.09333524852991104, 0.07935678958892822, -0.2185502052307129, -0.13339371979236603, 0.05872276425361633, -0.00575496768578887, 0.22408108413219452, -0.034034017473459244, -0.11356475204229355, -0.027013886719942093, 0.04241163283586502, -0.06043251231312752, 0.08524788916110992, 0.023536119610071182, -0.08113526552915573, -0.032957352697849274, 0.05323701351881027, 0.012368366122245789, 0.00524376705288887, 0.09360801428556442, 0.020107939839363098, -0.0009265501867048442, 0.01785753294825554, 0.047885000705718994, -0.0675911232829094, -0.1984109878540039, 0.09357594698667526, -0.05215044692158699, 0.0015536568826064467, -0.08013670891523361, -0.15122665464878082, -0.08837161958217621, -0.16009655594825745, 0.12540200352668762, -0.034406669437885284, 0.12700119614601135, -0.06619787961244583, 0.17341409623622894, -0.07871770113706589, 0.04481020197272301, -0.047349292784929276, 0.050332702696323395, -0.007268077693879604, -0.07756082713603973, 0.16585899889469147, -0.15564003586769104, 0.01809087023139, 0.19572502374649048, -0.018915493041276932, 0.07177707552909851, 0.021322092041373253, -0.0636206790804863, 0.23147478699684143, 0.3014698624610901, 0.008138049393892288, 0.1665448248386383, 0.3018903136253357, -0.07466315478086472, -0.2642788887023926, -0.05505012720823288, -0.2841376066207886, -0.05371501296758652, 0.10716094076633453, -0.22523896396160126, 0.06986407935619354, 0.14383509755134583, -0.06471995264291763, 0.30228954553604126, -0.21825523674488068, 0.012589273042976856, 0.15434536337852478, 
-0.08868814259767532, 0.5515313148498535, -0.1133413165807724, -0.17677772045135498, -0.008122089318931103, -0.08741296827793121, 0.10602109134197235, -0.0340677872300148, 0.06877441704273224, 0.013465235009789467, 0.04797380417585373, 0.048932258039712906, -0.03111894056200981, 0.22701001167297363, 0.008710170164704323, 0.09015397727489471, -0.07378865778446198, -0.18624304234981537, 0.11639340221881866, -0.04359482601284981, -0.08891059458255768, 0.0849778801202774, -0.05942516401410103, -0.11078983545303345, 0.04663389176130295, -0.07950539886951447, -0.024862350896000862, 0.08423490077257156, -0.04678233340382576, -0.042606171220541, -0.008054176345467567, -0.1618063747882843, -0.0002289071271661669, 0.31360217928886414, -0.07096036523580551, 0.16695955395698547, 0.03677211329340935, 0.00038613268407061696, -0.11027684062719345, 0.030288029462099075, -0.05203165486454964, -0.021576624363660812, 0.09578979015350342, -0.11096979677677155, 0.03204701095819473, 0.14160704612731934, -0.04864364117383957, 0.05846960097551346, 0.09256096184253693, -0.0849417969584465, 0.007583672646433115, 0.17753590643405914, -0.17537221312522888, -0.1273445188999176, -0.006135711446404457, -0.09862716495990753, 0.14055661857128143, 0.04394126310944557, 0.05191568285226822, 0.16669964790344238, 0.03967129811644554, -0.029474308714270592, -0.02817419543862343, -0.1153380498290062, -0.0201893113553524, 0.040153320878744125, 0.00045633706031367183, -0.08791285753250122, 0.2262638509273529, 0.06409153342247009, -0.1328488290309906, -0.051157206296920776, 0.2161225974559784, -0.06805316358804703, -0.04911920800805092, -0.223562553524971, 0.10752306133508682, -0.07112517952919006, -0.0965060144662857, 0.05453834682703018, -0.02270081453025341, 0.005106312222778797, 0.181985542178154, 0.03941008821129799, 0.11070270836353302, 0.03738937899470329, -0.02448922023177147, 0.15798696875572205, -0.142850860953331, -0.14191335439682007, -0.025354057550430298, -0.08757315576076508, 
-0.13844476640224457, -0.026804137974977493, 0.1617041826248169, -0.09177309274673462, -0.14772607386112213, -0.2621181011199951, 0.10968475043773651, -0.16432365775108337, -0.10192688554525375, -0.03469514101743698, -0.08968492597341537, 0.0696166530251503, 0.030301768332719803, -0.03093348816037178, -0.06706760823726654, -0.18593791127204895, 0.0816768929362297, 0.06349513679742813, 0.045533183962106705, -0.017847947776317596, 0.0067379772663116455, 0.1720137596130371, 0.025955144315958023, 0.10040043294429779, 0.16762186586856842, 0.011397695168852806, 0.2246655523777008, -0.1671202927827835, -0.11496317386627197, 0.1336962729692459, -0.026543032377958298, 0.06762003898620605, 0.16792191565036774, -0.0772583931684494, 0.015526676550507545, -0.028136352077126503, 0.07066910713911057, -0.11003983020782471, -0.105624258518219, 0.007937257178127766, 0.02567129209637642, -0.2755882740020752, -0.005599735304713249, -0.19717298448085785, 0.14788752794265747, 0.02579621411859989, 0.03297143429517746, 0.10257530212402344, 0.10404334217309952, 0.08312062919139862, -0.0017710148822516203, 0.03226327523589134, -0.1176818460226059, 0.02753005363047123, -0.059239376336336136, -0.020663779228925705, 0.017624232918024063, 0.36952024698257446, -0.03603357449173927, -0.046802736818790436, 0.003710439894348383, 0.1307835876941681, -0.02139742486178875, 0.017395347356796265, 0.13209912180900574, 0.12607666850090027, -0.08595693111419678, -0.1504845917224884, 0.04888554662466049, -0.04565655067563057, -0.02836887165904045, 0.1464131623506546, 0.05905961990356445, 0.1050296202301979, 0.0908031314611435, -0.014463032595813274, -0.00318976235575974, 0.012856799177825451, -0.15486004948616028, 0.06223496049642563, -0.010558074340224266, 0.012565906159579754, 0.017934376373887062, 0.15238402783870697, -0.005540105979889631, 0.07739730179309845, -0.09889880567789078, 0.004208535887300968, -0.13498884439468384, -0.07913459837436676, 0.03617347031831741, -0.13393273949623108, 
0.04141177982091904, -0.01871878281235695, 0.029611799865961075, 0.30386561155319214, 0.02558239921927452, -0.020639164373278618, 0.12512871623039246, -0.1214587539434433, -0.12050267308950424, -0.001594188273884356, -0.029960084706544876, 0.0791488066315651, -0.02633434161543846, -0.0997740775346756, -0.1001306027173996, -0.15166029334068298, -0.09759195148944855, 0.05182836204767227, -0.04993441700935364, -0.059362251311540604, -0.17634081840515137, -0.05707859992980957, -0.05147340148687363, 0.14025864005088806, -0.12263951450586319, 0.15159130096435547, -0.014490418136119843, 0.004084470681846142, 0.04405883327126503, 0.1950942426919937, -0.03644494712352753, 0.08714226633310318, 0.0154351145029068, 0.1522706001996994, -0.05119588226079941, 0.14720745384693146, -0.10931728035211563, -0.04014137014746666, -0.06710435450077057, 0.21513493359088898, 0.25630924105644226, -0.06136954948306084, -0.008937356993556023, -0.012760217301547527, 0.058654606342315674, 0.1073930487036705, 0.16049085557460785, 0.002326392102986574, 0.2802925705909729, -0.03133585304021835, 0.04815128445625305, 0.02901598811149597, 0.013607407920062542, -0.06336209923028946, 0.03397751972079277, 0.07539387792348862, -0.035039983689785004, -0.1412304788827896, 0.15837742388248444, -0.21980468928813934, 0.18157227337360382, 0.11640069633722305, -0.19996967911720276, -0.013728445395827293, -0.04882071167230606, 0.1689416468143463, -0.0856364443898201, 0.1637246012687683, -0.0903693437576294, -0.2108195722103119, -0.2056000679731369, 0.03867346793413162, -0.34623071551322937, -0.254462867975235, 0.10422009229660034, 0.1488201916217804, 0.04015883058309555, -0.018507536500692368, -0.019967829808592796, -0.018367022275924683, 0.04877542704343796, -0.0067357709631323814, 0.06014643982052803, 0.031397558748722076, -0.02988368645310402, -0.24127542972564697, -0.029804671183228493, 0.023964406922459602, -0.07093082368373871, 0.07464958727359772, -0.06874357163906097, -0.022495782002806664, 
0.08059766888618469, -0.03066304884850979, 0.03298592567443848, -0.035373736172914505, -0.16326889395713806, 0.027529051527380943, 0.03900543600320816, 0.036012712866067886, 0.00634160777553916, 0.0008072225609794259, -0.03455270454287529, 0.0644603744149208, -0.16716794669628143, -0.16015739738941193, 0.14140215516090393, -0.06745140254497528, 0.2779497504234314, -0.05812826007604599, -0.0809100940823555, 0.04766704887151718, -0.03426874056458473, 0.1807648241519928, -0.07756473124027252, 0.047254521399736404, 0.12766779959201813, 0.011127962730824947, 0.03121316432952881, -0.3092964291572571, 0.11082969605922699, -0.000795336440205574, -0.006093299947679043, -0.07581598311662674 ]
a63eb9e88f0684bc3944ddbbf2844db7ad95d64f
This is a dataset created using [vector-io](https://github.com/ai-northstar-tech/vector-io)
aintech/vdf_medium_articles
[ "vdf", "vector-io", "vector-dataset", "vector-embeddings", "region:us" ]
2024-01-25T08:35:12+00:00
{"tags": ["vdf", "vector-io", "vector-dataset", "vector-embeddings"]}
2024-01-26T10:43:45+00:00
[]
[]
TAGS #vdf #vector-io #vector-dataset #vector-embeddings #region-us
This is a dataset created using vector-io
[]
[ "TAGS\n#vdf #vector-io #vector-dataset #vector-embeddings #region-us \n" ]
[ 27 ]
[ "passage: TAGS\n#vdf #vector-io #vector-dataset #vector-embeddings #region-us \n" ]
[ -0.07180236279964447, -0.08148053288459778, -0.007709508761763573, -0.019173024222254753, 0.14560645818710327, 0.10926879197359085, 0.13584770262241364, 0.07990334182977676, 0.12042045593261719, 0.05055627226829529, 0.1706290990114212, 0.10322222113609314, 0.02330668829381466, 0.05941125750541687, -0.059248361736536026, -0.17309188842773438, 0.12309065461158752, 0.015508302487432957, -0.1911979615688324, -0.006123457569628954, 0.09644954651594162, -0.07563181966543198, 0.058045580983161926, -0.062185242772102356, -0.16142399609088898, 0.013869980350136757, 0.029958732426166534, -0.020493386313319206, 0.07945840805768967, -0.0015086440835148096, 0.11842415481805801, -0.04914211481809616, 0.02579415962100029, -0.2326444387435913, 0.008905508555471897, 0.08325758576393127, -0.06311193853616714, 0.10245553404092789, 0.09926449507474899, 0.034833263605833054, -0.07778108865022659, -0.06686671823263168, -0.030616579577326775, -0.009267302230000496, -0.1348983347415924, -0.19727003574371338, -0.07877200841903687, -0.09680203348398209, 0.015875613316893578, -0.005171640310436487, -0.000517705746460706, 0.14335639774799347, -0.0666094645857811, 0.04810299724340439, 0.10057054460048676, -0.20297159254550934, 0.03167477622628212, 0.27425017952919006, -0.0008105303859338164, 0.1612890362739563, -0.08370545506477356, 0.09327629208564758, 0.056716907769441605, -0.01124878041446209, 0.023242942988872528, 0.0012716074706986547, -0.03330763056874275, 0.09724602848291397, -0.14095719158649445, -0.07423476874828339, 0.32789501547813416, 0.019161634147167206, 0.09231271594762802, 0.05681467801332474, -0.09789670258760452, -0.1187039241194725, -0.016733314841985703, -0.0462469719350338, 0.008996985852718353, 0.09269599616527557, 0.20069703459739685, -0.016411447897553444, -0.09930981695652008, -0.025918787345290184, -0.11623502522706985, 0.08370279520750046, 0.028460245579481125, 0.10251845419406891, -0.3017427921295166, -0.04643966257572174, -0.15714380145072937, 
-0.03429196774959564, 0.09625453501939774, -0.08199937641620636, -0.018700798973441124, 0.008669673465192318, -0.011026683263480663, -0.2309359461069107, 0.12832707166671753, -0.007743110414594412, 0.058447230607271194, -0.0003178917686454952, -0.06905535608530045, 0.12303312122821808, 0.10490740090608597, 0.05329368636012077, -0.15921787917613983, 0.08581709861755371, 0.009003900922834873, -0.15720650553703308, -0.07294712960720062, -0.08915593475103378, -0.09412873536348343, 0.011137855239212513, -0.01465544756501913, 0.09329486638307571, 0.0014191587688401341, -0.00922873429954052, -0.04464375227689743, -0.011183028109371662, -0.12912139296531677, -0.0047120749950408936, 0.05134141445159912, 0.059134241193532944, 0.054537661373615265, 0.12640886008739471, -0.14897727966308594, -0.0068911039270460606, 0.0028177625499665737, 0.05396962910890579, -0.11753246188163757, 0.02022707834839821, -0.049384381622076035, -0.1620907485485077, 0.047828782349824905, -0.133766308426857, 0.049965228885412216, -0.10587078332901001, 0.030978726223111153, 0.018617304041981697, -0.012652745470404625, -0.03499074652791023, 0.17330370843410492, -0.037265557795763016, -0.01147010363638401, 0.0008548844489268959, -0.005573313217610121, -0.020131798461079597, -0.03625139221549034, 0.07359785586595535, -0.11648387461900711, 0.07241581380367279, -0.12591178715229034, 0.07196078449487686, -0.029837405309081078, 0.044693756848573685, 0.06308866292238235, -0.006888674106448889, -0.14650306105613708, 0.05099721625447273, -0.06144234538078308, 0.056635670363903046, -0.001069656922481954, 0.02880377694964409, 0.04609419032931328, 0.17554345726966858, -0.23845228552818298, 0.012783100828528404, 0.16298525035381317, -0.03583037480711937, -0.10658005625009537, 0.07994022220373154, -0.01544578280299902, -0.030011435970664024, -0.046889062970876694, 0.3127722144126892, -0.042969025671482086, -0.09661640971899033, 0.0956999659538269, 0.1788673996925354, -0.1079905703663826, -0.10275911539793015, 
0.07117363810539246, -0.03688632324337959, -0.11880289763212204, 0.002757465234026313, -0.030606726184487343, 0.049705974757671356, -0.11737719923257828, -0.050098489969968796, 0.01992848888039589, -0.02179921232163906, 0.09171184152364731, 0.0068854521960020065, 0.12443848699331284, 0.012719681486487389, 0.11658276617527008, -0.14901599287986755, 0.05921643599867821, -0.02716968022286892, 0.020454464480280876, -0.007105090189725161, 0.22358036041259766, -0.2215496152639389, 0.06605502963066101, -0.1690928041934967, -0.22053499519824982, 0.05219389870762825, 0.09895897656679153, -0.10310255736112595, 0.15027183294296265, 0.07313314825296402, -0.06700360029935837, 0.06176945939660072, 0.024675549939274788, 0.08549404889345169, 0.0261240154504776, -0.06065605580806732, -0.10494212061166763, 0.017033278942108154, -0.15767814218997955, -0.0993892028927803, -0.14757829904556274, -0.04794733598828316, 0.07875803112983704, 0.07182709872722626, 0.09853123128414154, 0.013890317641198635, -0.00909116305410862, -0.0017376609612256289, -0.003624927019700408, -0.02310425229370594, 0.04928829148411751, -0.10087628662586212, -0.05307186767458916, 0.0464622788131237, 0.05315040051937103, 0.23122692108154297, 0.10202333331108093, -0.1712673455476761, 0.07810350507497787, -0.061974864453077316, 0.03718813881278038, 0.08953675627708435, 0.005696312990039587, -0.05009938031435013, -0.0945853739976883, 0.020359262824058533, 0.05136191099882126, 0.0008690636022947729, 0.07864043116569519, 0.06925435364246368, -0.05104278773069382, -0.14468975365161896, 0.09187667071819305, 0.18141891062259674, -0.03358813375234604, 0.17599299550056458, 0.22246074676513672, 0.09836085885763168, 0.08225822448730469, -0.11588472127914429, -0.0877513438463211, 0.014575970359146595, 0.008930844254791737, -0.009205480106174946, 0.07454106211662292, -0.21104824542999268, -0.004328057169914246, 0.0825033113360405, 0.05963968113064766, 0.04262271523475647, -0.09375212341547012, -0.08094416558742523, 
0.010735751129686832, -0.00984338391572237, -0.08524920046329498, 0.12528467178344727, -0.03215469792485237, 0.07529324293136597, -0.040819793939590454, -0.040915049612522125, 0.08890565484762192, -0.010358569212257862, -0.07709895074367523, 0.14036430418491364, -0.22133983671665192, -0.1293429434299469, -0.1239272952079773, -0.12017716467380524, -0.07627834379673004, 0.006392818409949541, -0.009064850397408009, -0.1565741002559662, -0.06844848394393921, 0.05160709470510483, 0.07938212901353836, -0.13681283593177795, 0.007550485897809267, 0.03655162453651428, 0.020608967170119286, -0.040306903421878815, -0.11874914914369583, 0.016170457005500793, -0.07805360108613968, 0.12519720196723938, 0.11858902871608734, 0.023829875513911247, 0.13261017203330994, 0.11354774236679077, 0.059157926589250565, 0.0716547966003418, 0.000302226806525141, 0.2929176092147827, -0.11433903872966766, -0.06382603943347931, 0.049605876207351685, -0.05437111854553223, -0.0043157669715583324, 0.1730922907590866, 0.07307426631450653, -0.12102870643138885, -0.11226002126932144, -0.07007808983325958, -0.0749349594116211, -0.24973155558109283, -0.10746623575687408, -0.1639830470085144, -0.04415947571396828, -0.023507017642259598, 0.049329426139593124, -0.009571680799126625, 0.030716734007000923, 0.12416499108076096, 0.10108719766139984, -0.10636486113071442, -0.04408258944749832, 0.05466032028198242, -0.05108180269598961, -0.04502201825380325, -0.0011926019797101617, -0.06699100136756897, 0.07877601683139801, 0.06844478100538254, 0.2769797742366791, 0.14914530515670776, 0.07875829935073853, 0.03532225266098976, 0.13465942442417145, 0.08005871623754501, 0.15661929547786713, -0.013237970881164074, -0.041987042874097824, -0.031801626086235046, -0.003929925616830587, 0.00874637346714735, -0.003928845748305321, 0.14763444662094116, -0.24645259976387024, 0.021031470969319344, -0.11434007436037064, 0.10508403182029724, -0.005334905348718166, 0.1190873309969902, -0.12877009809017181, 0.12589982151985168, 
0.0863468125462532, 0.09181427210569382, -0.12679795920848846, 0.05581144988536835, 0.1152421161532402, -0.03851238265633583, -0.08260569721460342, -0.033869314938783646, 0.05597378686070442, -0.08559773117303848, 0.05203189328312874, -0.061262600123882294, 0.01457724068313837, 0.023430244997143745, 0.06681088358163834, -0.2855408191680908, 0.3211740553379059, 0.04063686728477478, -0.07819580286741257, -0.060345809906721115, -0.00487199891358614, -0.004295833874493837, 0.13632744550704956, 0.17369720339775085, 0.09078573435544968, -0.028827602043747902, -0.1753232330083847, -0.0176833663135767, -0.008216281421482563, 0.1664644032716751, -0.03628693148493767, -0.11363600194454193, 0.022979192435741425, 0.006808034144341946, -0.03727153688669205, 0.04357192665338516, -0.025023723021149635, -0.06746356934309006, 0.07505256682634354, 0.08197415620088577, 0.08152882009744644, 0.026426097378134727, 0.012066083028912544, -0.0748511329293251, -0.06824042648077011, -0.12684614956378937, 0.04014499858021736, -0.12555427849292755, 0.07976831495761871, 0.08088364452123642, -0.025753024965524673, 0.01647941768169403, -0.02365982159972191, 0.055603452026844025, -0.08142754435539246, -0.1492289900779724, 0.11701050400733948, -0.09066575020551682, 0.07198460400104523, -0.12097691744565964, 0.08192532509565353, -0.07148773223161697, -0.004544488620012999, -0.0026541342958807945, 0.01754414662718773, 0.062037330120801926, -0.0750214159488678, 0.13897690176963806, 0.09996786713600159, -0.11213485896587372, 0.1027119979262352, -0.10342039912939072, -0.09042582660913467, 0.006994728464633226, -0.010359994135797024, 0.3355264365673065, 0.3220052123069763, -0.07906541228294373, 0.06980554014444351, 0.21570482850074768, -0.08695970475673676, -0.3001821041107178, 0.015866192057728767, -0.1234242245554924, -0.001389215118251741, 0.06361983716487885, -0.15854153037071228, 0.03485551476478577, 0.04242328554391861, -0.0149210961535573, 0.24293214082717896, -0.22744405269622803, 
-0.019725341349840164, 0.1646607220172882, 0.05606401339173317, 0.36136874556541443, -0.07683960348367691, -0.09746343642473221, 0.0390242263674736, -0.026757940649986267, 0.025673124939203262, 0.08667610585689545, 0.0845021903514862, 0.022251537069678307, 0.025279812514781952, 0.06192801520228386, -0.035714201629161835, 0.20607241988182068, 0.06973838806152344, 0.1149371787905693, -0.02506033144891262, -0.14588426053524017, 0.1743279993534088, 0.012933105230331421, -0.004116842057555914, 0.14285650849342346, -0.03312276303768158, -0.18790802359580994, 0.06852579861879349, -0.03751077875494957, -0.029367152601480484, 0.04397653415799141, -0.08321474492549896, -0.05961396172642708, -0.024368833750486374, -0.004804249852895737, -0.027083750814199448, 0.30119985342025757, 0.07461818307638168, -0.03726828098297119, -0.017604731023311615, -0.09501098841428757, -0.07339932769536972, -0.09642712026834488, -0.08219359070062637, 0.0010253613581880927, 0.1494954526424408, -0.12551143765449524, 0.04618969187140465, 0.11596714705228806, -0.054053906351327896, 0.10355203598737717, 0.09036675095558167, -0.0819803848862648, 0.009267875924706459, 0.12880389392375946, -0.15365779399871826, -0.09894415736198425, 0.0001755241391947493, -0.13135184347629547, 0.0867086723446846, 0.08093255758285522, 0.13089226186275482, 0.046156786382198334, 0.019056053832173347, -0.0403144434094429, 0.08794736862182617, -0.09760680049657822, 0.07214336842298508, 0.11290596425533295, 0.03972151130437851, -0.15453723073005676, 0.10165182501077652, 0.028519393876194954, -0.11515066772699356, 0.007380486000329256, 0.04547470435500145, -0.07182630896568298, -0.11242066323757172, 0.09225968271493912, 0.21808750927448273, 0.046350859105587006, -0.014094154350459576, 0.059822119772434235, -0.0747767984867096, 0.002987103769555688, 0.053240079432725906, 0.009431540966033936, 0.030984897166490555, -0.040877968072891235, -0.09544862061738968, -0.05950377508997917, -0.012678993865847588, -0.035304680466651917, 
-0.017985781654715538, -0.1485043168067932, -0.1962389498949051, -0.11610051244497299, 0.10989449173212051, -0.06682190299034119, -0.07597755640745163, -0.16946227848529816, 0.023884311318397522, -0.045021627098321915, -0.08229001611471176, 0.02028041146695614, -0.016040386632084846, 0.0733925923705101, -0.010934922844171524, 0.0021095334086567163, -0.01746487431228161, -0.1406761109828949, 0.02010425738990307, 0.01180451363325119, 0.037202320992946625, -0.044781215488910675, -0.08690455555915833, -0.0013192325131967664, -0.02468552440404892, 0.1523311287164688, 0.09263552725315094, 0.011084914207458496, 0.09141310304403305, -0.23645268380641937, -0.09734706580638885, 0.13332000374794006, 0.026494119316339493, 0.14898967742919922, 0.038245588541030884, -0.0010485673556104302, -0.024545198306441307, -0.05064098909497261, 0.047321807593107224, 0.03106512688100338, -0.023087074980139732, -0.004892778117209673, -0.14760835468769073, -0.055586427450180054, -0.01752571575343609, -0.058303941041231155, 0.11928489804267883, 0.013270962983369827, 0.05018178001046181, 0.06110389158129692, 0.13338139653205872, 0.046186938881874084, 0.009243158623576164, 0.007505130488425493, -0.17269304394721985, 0.08242703974246979, -0.0467601977288723, 0.0325273759663105, -0.09639032930135727, 0.252131849527359, 0.12764528393745422, -0.06236950680613518, 0.0028057058807462454, 0.08443250507116318, -0.09472896158695221, 0.02632206119596958, 0.06551022827625275, 0.09639143943786621, -0.04503472521901131, -0.2259110063314438, 0.11573336273431778, 0.09398626536130905, 0.21728478372097015, 0.023811129853129387, -0.033267486840486526, -0.003775285091251135, 0.05615200474858284, 0.04034905880689621, -0.10613766312599182, 0.0016835778951644897, -0.017823951318860054, -0.0225002970546484, 0.10908125340938568, 0.036816805601119995, -0.07679925858974457, 0.0383300706744194, -0.08151338249444962, 0.035493288189172745, -0.07979725301265717, -0.08035343140363693, -0.08254929631948471, 
-0.14044149219989777, -0.03682447597384453, -0.19966498017311096, 0.04871422424912453, -0.11344172805547714, 0.007459364365786314, 0.15601177513599396, 0.06470489501953125, -0.011873527429997921, 0.10274019092321396, 0.022307785227894783, -0.005270017776638269, 0.07474388927221298, -0.0014242155011743307, -0.03747313842177391, -0.028294935822486877, -0.05082140862941742, -0.061702657490968704, -0.11917261779308319, -0.03602559119462967, 0.006726776249706745, 0.021556461229920387, 0.05242956802248955, -0.15671978890895844, -0.11525119096040726, -0.04379827156662941, -0.007572627160698175, -0.1892344206571579, 0.22314035892486572, 0.027989188209176064, 0.007618292700499296, 0.029195351526141167, 0.09122022241353989, 0.028927018865942955, 0.13706597685813904, -0.07131177932024002, -0.04492070898413658, 0.011030983179807663, 0.1664832979440689, -0.07437912374734879, -0.04803623631596565, -0.10469140112400055, 0.2684241235256195, 0.2749859690666199, -0.08931073546409607, -0.029805529862642288, 0.07159694284200668, 0.012042054906487465, 0.004339274019002914, 0.17616812884807587, 0.024707062169909477, 0.13067319989204407, -0.04358752444386482, 0.06557898223400116, -0.05169843137264252, -0.022026890888810158, -0.07114531099796295, 0.11848341673612595, 0.11960571259260178, -0.07579096406698227, -0.03972547873854637, 0.13453704118728638, -0.17879875004291534, 0.005061928182840347, 0.09970016032457352, -0.12092026323080063, -0.05299113690853119, -0.051606107503175735, 0.06101837009191513, 0.028097068890929222, 0.09507878869771957, -0.07859291881322861, -0.10084845870733261, 0.043896812945604324, 0.011534900404512882, -0.16856291890144348, -0.13489437103271484, 0.05298902839422226, -0.10097434371709824, 0.12238556146621704, 0.012865899130702019, 0.026155367493629456, -0.01276884414255619, 0.008090315386652946, -0.07121432572603226, 0.014549324288964272, -0.004886817652732134, 0.04039803892374039, -0.13711777329444885, 0.08554769307374954, -0.04453611746430397, 
-0.0607648529112339, 0.0686107724905014, -0.12182588130235672, -0.01036214828491211, 0.019093574956059456, -0.06714774668216705, 0.0710485503077507, -0.011215115897357464, -0.08982843160629272, 0.028442805632948875, 0.05406981334090233, 0.03233369067311287, 0.0016697135288268328, -0.07146681845188141, 0.03845850005745888, 0.026321275159716606, -0.05073462054133415, -0.10980584472417831, 0.00003499513695714995, -0.050572771579027176, 0.15365640819072723, 0.0053634303621947765, -0.000979745527729392, -0.0031250426545739174, -0.1200474351644516, 0.06528399884700775, -0.07116997987031937, 0.016855599358677864, 0.21745702624320984, -0.025598470121622086, -0.017455942928791046, -0.11258427053689957, 0.06815893948078156, -0.007642684504389763, -0.007723580580204725, -0.08212197571992874 ]
37a32f8a73b9c774de39cf386674327c7d3d9e14
# Dataset Card for Evaluation run of ibivibiv/aegolius-acadicus-30b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [ibivibiv/aegolius-acadicus-30b](https://huggingface.co/ibivibiv/aegolius-acadicus-30b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ibivibiv__aegolius-acadicus-30b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T08:41:28.082474](https://huggingface.co/datasets/open-llm-leaderboard/details_ibivibiv__aegolius-acadicus-30b/blob/main/results_2024-01-25T08-41-28.082474.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6566791267920726, "acc_stderr": 0.03204461446226675, "acc_norm": 0.6559064592526, "acc_norm_stderr": 0.032719772118023696, "mc1": 0.5177478580171359, "mc1_stderr": 0.017492470843075356, "mc2": 0.6707176642401714, "mc2_stderr": 0.015136561645539275 }, "harness|arc:challenge|25": { "acc": 0.7005119453924915, "acc_stderr": 0.01338502163731357, "acc_norm": 0.7261092150170648, "acc_norm_stderr": 0.013032004972989506 }, "harness|hellaswag|10": { "acc": 0.7103166699860586, "acc_stderr": 0.004526883021027629, "acc_norm": 0.880103565026887, "acc_norm_stderr": 0.0032417650929121374 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6666666666666666, "acc_stderr": 0.04072314811876837, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.04072314811876837 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7132075471698113, "acc_stderr": 0.027834912527544057, "acc_norm": 0.7132075471698113, "acc_norm_stderr": 0.027834912527544057 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 
0.05016135580465919 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4019607843137255, "acc_stderr": 0.04878608714466996, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.04878608714466996 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.042295258468165065, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5787234042553191, "acc_stderr": 0.03227834510146268, "acc_norm": 0.5787234042553191, "acc_norm_stderr": 0.03227834510146268 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5517241379310345, "acc_stderr": 0.04144311810878152, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.04144311810878152 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42592592592592593, "acc_stderr": 0.02546714904546955, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.02546714904546955 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.48412698412698413, "acc_stderr": 0.04469881854072606, "acc_norm": 0.48412698412698413, "acc_norm_stderr": 0.04469881854072606 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7806451612903226, "acc_stderr": 0.023540799358723302, "acc_norm": 0.7806451612903226, "acc_norm_stderr": 0.023540799358723302 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 
0.4975369458128079, "acc_stderr": 0.03517945038691063, "acc_norm": 0.4975369458128079, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.803030303030303, "acc_stderr": 0.028335609732463362, "acc_norm": 0.803030303030303, "acc_norm_stderr": 0.028335609732463362 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.021995311963644237, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.021995311963644237 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6641025641025641, "acc_stderr": 0.023946724741563973, "acc_norm": 0.6641025641025641, "acc_norm_stderr": 0.023946724741563973 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.36666666666666664, "acc_stderr": 0.029381620726465066, "acc_norm": 0.36666666666666664, "acc_norm_stderr": 0.029381620726465066 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6890756302521008, "acc_stderr": 0.030066761582977938, "acc_norm": 0.6890756302521008, "acc_norm_stderr": 0.030066761582977938 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.37748344370860926, "acc_stderr": 0.03958027231121569, "acc_norm": 0.37748344370860926, "acc_norm_stderr": 0.03958027231121569 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8403669724770643, "acc_stderr": 0.015703498348461783, "acc_norm": 0.8403669724770643, "acc_norm_stderr": 0.015703498348461783 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5, "acc_stderr": 0.034099716973523674, "acc_norm": 0.5, "acc_norm_stderr": 0.034099716973523674 }, 
"harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8480392156862745, "acc_stderr": 0.0251956584289318, "acc_norm": 0.8480392156862745, "acc_norm_stderr": 0.0251956584289318 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.025744902532290902, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.025744902532290902 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6995515695067265, "acc_stderr": 0.03076935200822914, "acc_norm": 0.6995515695067265, "acc_norm_stderr": 0.03076935200822914 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7938931297709924, "acc_stderr": 0.03547771004159465, "acc_norm": 0.7938931297709924, "acc_norm_stderr": 0.03547771004159465 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8099173553719008, "acc_stderr": 0.03581796951709282, "acc_norm": 0.8099173553719008, "acc_norm_stderr": 0.03581796951709282 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7870370370370371, "acc_stderr": 0.0395783547198098, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.0395783547198098 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7484662576687117, "acc_stderr": 0.03408997886857529, "acc_norm": 0.7484662576687117, "acc_norm_stderr": 0.03408997886857529 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4375, "acc_stderr": 0.04708567521880525, "acc_norm": 0.4375, "acc_norm_stderr": 0.04708567521880525 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8888888888888888, "acc_stderr": 0.020588491316092375, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.020588491316092375 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 
0.8314176245210728, "acc_stderr": 0.013387895731543604, "acc_norm": 0.8314176245210728, "acc_norm_stderr": 0.013387895731543604 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7398843930635838, "acc_stderr": 0.023618678310069356, "acc_norm": 0.7398843930635838, "acc_norm_stderr": 0.023618678310069356 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4324022346368715, "acc_stderr": 0.016568971233548606, "acc_norm": 0.4324022346368715, "acc_norm_stderr": 0.016568971233548606 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7189542483660131, "acc_stderr": 0.025738854797818733, "acc_norm": 0.7189542483660131, "acc_norm_stderr": 0.025738854797818733 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7041800643086816, "acc_stderr": 0.025922371788818763, "acc_norm": 0.7041800643086816, "acc_norm_stderr": 0.025922371788818763 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7469135802469136, "acc_stderr": 0.024191808600712995, "acc_norm": 0.7469135802469136, "acc_norm_stderr": 0.024191808600712995 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5035460992907801, "acc_stderr": 0.02982674915328092, "acc_norm": 0.5035460992907801, "acc_norm_stderr": 0.02982674915328092 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4758800521512386, "acc_stderr": 0.012755368722863937, "acc_norm": 0.4758800521512386, "acc_norm_stderr": 0.012755368722863937 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6727941176470589, "acc_stderr": 0.02850145286039656, "acc_norm": 0.6727941176470589, "acc_norm_stderr": 0.02850145286039656 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6666666666666666, "acc_stderr": 0.019070985589687495, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.019070985589687495 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, 
"harness|hendrycksTest-security_studies|5": { "acc": 0.7183673469387755, "acc_stderr": 0.028795185574291293, "acc_norm": 0.7183673469387755, "acc_norm_stderr": 0.028795185574291293 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.025870646766169136, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.025870646766169136 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.035887028128263686, "acc_norm": 0.85, "acc_norm_stderr": 0.035887028128263686 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.03869543323472101, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8421052631578947, "acc_stderr": 0.027966785859160893, "acc_norm": 0.8421052631578947, "acc_norm_stderr": 0.027966785859160893 }, "harness|truthfulqa:mc|0": { "mc1": 0.5177478580171359, "mc1_stderr": 0.017492470843075356, "mc2": 0.6707176642401714, "mc2_stderr": 0.015136561645539275 }, "harness|winogrande|5": { "acc": 0.8492501973164956, "acc_stderr": 0.010056094631479672 }, "harness|gsm8k|5": { "acc": 0.7050796057619408, "acc_stderr": 0.012560698010954772 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_ibivibiv__aegolius-acadicus-30b
[ "region:us" ]
2024-01-25T08:42:31+00:00
{"pretty_name": "Evaluation run of ibivibiv/aegolius-acadicus-30b", "dataset_summary": "Dataset automatically created during the evaluation run of model [ibivibiv/aegolius-acadicus-30b](https://huggingface.co/ibivibiv/aegolius-acadicus-30b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ibivibiv__aegolius-acadicus-30b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T08:41:28.082474](https://huggingface.co/datasets/open-llm-leaderboard/details_ibivibiv__aegolius-acadicus-30b/blob/main/results_2024-01-25T08-41-28.082474.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6566791267920726,\n \"acc_stderr\": 0.03204461446226675,\n \"acc_norm\": 0.6559064592526,\n \"acc_norm_stderr\": 0.032719772118023696,\n \"mc1\": 0.5177478580171359,\n \"mc1_stderr\": 0.017492470843075356,\n \"mc2\": 0.6707176642401714,\n \"mc2_stderr\": 0.015136561645539275\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7005119453924915,\n \"acc_stderr\": 0.01338502163731357,\n \"acc_norm\": 0.7261092150170648,\n \"acc_norm_stderr\": 0.013032004972989506\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7103166699860586,\n \"acc_stderr\": 0.004526883021027629,\n \"acc_norm\": 0.880103565026887,\n \"acc_norm_stderr\": 0.0032417650929121374\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.04072314811876837,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.04072314811876837\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7132075471698113,\n \"acc_stderr\": 0.027834912527544057,\n \"acc_norm\": 0.7132075471698113,\n \"acc_norm_stderr\": 0.027834912527544057\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n 
\"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4019607843137255,\n \"acc_stderr\": 0.04878608714466996,\n \"acc_norm\": 0.4019607843137255,\n \"acc_norm_stderr\": 0.04878608714466996\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5787234042553191,\n \"acc_stderr\": 0.03227834510146268,\n \"acc_norm\": 0.5787234042553191,\n \"acc_norm_stderr\": 0.03227834510146268\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878152,\n \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878152\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42592592592592593,\n \"acc_stderr\": 0.02546714904546955,\n \"acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.02546714904546955\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.48412698412698413,\n \"acc_stderr\": 0.04469881854072606,\n \"acc_norm\": 
0.48412698412698413,\n \"acc_norm_stderr\": 0.04469881854072606\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7806451612903226,\n \"acc_stderr\": 0.023540799358723302,\n \"acc_norm\": 0.7806451612903226,\n \"acc_norm_stderr\": 0.023540799358723302\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4975369458128079,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.803030303030303,\n \"acc_stderr\": 0.028335609732463362,\n \"acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.028335609732463362\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.021995311963644237,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.021995311963644237\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6641025641025641,\n \"acc_stderr\": 0.023946724741563973,\n \"acc_norm\": 0.6641025641025641,\n \"acc_norm_stderr\": 0.023946724741563973\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.36666666666666664,\n \"acc_stderr\": 0.029381620726465066,\n \"acc_norm\": 0.36666666666666664,\n \"acc_norm_stderr\": 0.029381620726465066\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 
0.6890756302521008,\n \"acc_stderr\": 0.030066761582977938,\n \"acc_norm\": 0.6890756302521008,\n \"acc_norm_stderr\": 0.030066761582977938\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.37748344370860926,\n \"acc_stderr\": 0.03958027231121569,\n \"acc_norm\": 0.37748344370860926,\n \"acc_norm_stderr\": 0.03958027231121569\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8403669724770643,\n \"acc_stderr\": 0.015703498348461783,\n \"acc_norm\": 0.8403669724770643,\n \"acc_norm_stderr\": 0.015703498348461783\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.034099716973523674,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.034099716973523674\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8480392156862745,\n \"acc_stderr\": 0.0251956584289318,\n \"acc_norm\": 0.8480392156862745,\n \"acc_norm_stderr\": 0.0251956584289318\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.025744902532290902,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.025744902532290902\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6995515695067265,\n \"acc_stderr\": 0.03076935200822914,\n \"acc_norm\": 0.6995515695067265,\n \"acc_norm_stderr\": 0.03076935200822914\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7938931297709924,\n \"acc_stderr\": 0.03547771004159465,\n \"acc_norm\": 0.7938931297709924,\n \"acc_norm_stderr\": 0.03547771004159465\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8099173553719008,\n \"acc_stderr\": 0.03581796951709282,\n \"acc_norm\": 0.8099173553719008,\n \"acc_norm_stderr\": 0.03581796951709282\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.0395783547198098\n },\n 
\"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7484662576687117,\n \"acc_stderr\": 0.03408997886857529,\n \"acc_norm\": 0.7484662576687117,\n \"acc_norm_stderr\": 0.03408997886857529\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4375,\n \"acc_stderr\": 0.04708567521880525,\n \"acc_norm\": 0.4375,\n \"acc_norm_stderr\": 0.04708567521880525\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.020588491316092375,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.020588491316092375\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8314176245210728,\n \"acc_stderr\": 0.013387895731543604,\n \"acc_norm\": 0.8314176245210728,\n \"acc_norm_stderr\": 0.013387895731543604\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.023618678310069356,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.023618678310069356\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4324022346368715,\n \"acc_stderr\": 0.016568971233548606,\n \"acc_norm\": 0.4324022346368715,\n \"acc_norm_stderr\": 0.016568971233548606\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7189542483660131,\n \"acc_stderr\": 0.025738854797818733,\n \"acc_norm\": 0.7189542483660131,\n \"acc_norm_stderr\": 0.025738854797818733\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7041800643086816,\n \"acc_stderr\": 0.025922371788818763,\n \"acc_norm\": 0.7041800643086816,\n \"acc_norm_stderr\": 0.025922371788818763\n },\n 
\"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600712995,\n \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600712995\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5035460992907801,\n \"acc_stderr\": 0.02982674915328092,\n \"acc_norm\": 0.5035460992907801,\n \"acc_norm_stderr\": 0.02982674915328092\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4758800521512386,\n \"acc_stderr\": 0.012755368722863937,\n \"acc_norm\": 0.4758800521512386,\n \"acc_norm_stderr\": 0.012755368722863937\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6727941176470589,\n \"acc_stderr\": 0.02850145286039656,\n \"acc_norm\": 0.6727941176470589,\n \"acc_norm_stderr\": 0.02850145286039656\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.019070985589687495,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.019070985589687495\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7183673469387755,\n \"acc_stderr\": 0.028795185574291293,\n \"acc_norm\": 0.7183673469387755,\n \"acc_norm_stderr\": 0.028795185574291293\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.025870646766169136,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.025870646766169136\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.035887028128263686,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.035887028128263686\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 
0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8421052631578947,\n \"acc_stderr\": 0.027966785859160893,\n \"acc_norm\": 0.8421052631578947,\n \"acc_norm_stderr\": 0.027966785859160893\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5177478580171359,\n \"mc1_stderr\": 0.017492470843075356,\n \"mc2\": 0.6707176642401714,\n \"mc2_stderr\": 0.015136561645539275\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8492501973164956,\n \"acc_stderr\": 0.010056094631479672\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7050796057619408,\n \"acc_stderr\": 0.012560698010954772\n }\n}\n```", "repo_url": "https://huggingface.co/ibivibiv/aegolius-acadicus-30b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|arc:challenge|25_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|arc:challenge|25_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|gsm8k|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|gsm8k|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hellaswag|10_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hellaswag|10_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T08-40-13.766236.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T08-40-13.766236.parquet", 
"**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T08-40-13.766236.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T08-41-28.082474.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T08-41-28.082474.parquet", 
"**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T08-41-28.082474.parquet", 
"**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T08-41-28.082474.parquet", 
"**/details_harness|hendrycksTest-econometrics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T08-41-28.082474.parquet", 
"**/details_harness|hendrycksTest-international_law|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T08-41-28.082474.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T08-41-28.082474.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": 
[{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", 
"path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T08-41-28.082474.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": 
"2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", 
"path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["**/details_harness|winogrande|5_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["**/details_harness|winogrande|5_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T08-41-28.082474.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T08_40_13.766236", "path": ["results_2024-01-25T08-40-13.766236.parquet"]}, {"split": "2024_01_25T08_41_28.082474", "path": ["results_2024-01-25T08-41-28.082474.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T08-41-28.082474.parquet"]}]}]}
2024-01-25T08:43:49+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ibivibiv/aegolius-acadicus-30b Dataset automatically created during the evaluation run of model ibivibiv/aegolius-acadicus-30b on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T08:41:28.082474(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of ibivibiv/aegolius-acadicus-30b\n\n\n\nDataset automatically created during the evaluation run of model ibivibiv/aegolius-acadicus-30b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T08:41:28.082474(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ibivibiv/aegolius-acadicus-30b\n\n\n\nDataset automatically created during the evaluation run of model ibivibiv/aegolius-acadicus-30b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T08:41:28.082474(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 189, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ibivibiv/aegolius-acadicus-30b\n\n\n\nDataset automatically created during the evaluation run of model ibivibiv/aegolius-acadicus-30b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T08:41:28.082474(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
[ -0.0504622757434845, 0.2019975483417511, -0.004924583714455366, 0.04003859683871269, 0.07695625722408295, -0.009424406103789806, 0.05561508610844612, 0.10511850565671921, 0.016065813601017, 0.1839643120765686, -0.022321151569485664, 0.09641770273447037, 0.06836975365877151, 0.12706485390663147, 0.0121743930503726, -0.14012515544891357, 0.022675488144159317, -0.08069494366645813, 0.07419134676456451, 0.07711326330900192, 0.07610543072223663, -0.08357109129428864, 0.06350847333669662, -0.03200558200478554, 0.02742081694304943, -0.014444170519709587, -0.08809969574213028, -0.03914463520050049, 0.10060615092515945, 0.11543737351894379, 0.03622664138674736, -0.009072293527424335, 0.012886442244052887, -0.26014259457588196, 0.012944784946739674, 0.09492944926023483, -0.010664011351764202, 0.03723866492509842, 0.13123078644275665, -0.07376265525817871, 0.08483030647039413, -0.03613504767417908, 0.0712055116891861, 0.045406147837638855, -0.12421992421150208, -0.13826625049114227, -0.14330391585826874, -0.0011070352047681808, 0.05919056385755539, 0.03718549758195877, -0.0235889982432127, 0.1553584635257721, -0.03466283529996872, 0.04843515530228615, 0.1314466893672943, -0.12126261740922928, -0.020719055086374283, 0.06169527769088745, 0.01267621386796236, 0.07979963719844818, -0.07858199626207352, -0.014471275731921196, 0.03214230760931969, 0.05308520048856735, 0.023020369932055473, 0.00917377695441246, -0.025995785370469093, 0.016498969867825508, -0.1480540782213211, -0.13375578820705414, 0.15651735663414001, 0.009990088641643524, -0.04214160516858101, -0.1850966215133667, -0.017060155048966408, 0.03132474422454834, 0.00751812057569623, -0.034706514328718185, 0.002662149490788579, -0.025148577988147736, 0.09681285917758942, -0.01586674153804779, -0.09767819195985794, -0.019350236281752586, 0.0017958494136109948, 0.07353087514638901, 0.02518114447593689, -0.008857110515236855, 0.017496440559625626, 0.10182269662618637, -0.008501768112182617, -0.07639479637145996, 
-0.06931082159280777, -0.06015467271208763, -0.09837136417627335, -0.04086752608418465, 0.014161152765154839, -0.07617968320846558, 0.032585881650447845, 0.23515890538692474, -0.017316723242402077, 0.012995692901313305, -0.10027658194303513, 0.017284618690609932, 0.1240241751074791, 0.04394812136888504, -0.07748287171125412, -0.0402044840157032, -0.02071913331747055, 0.028099721297621727, 0.03653174266219139, -0.01766546443104744, 0.012291138991713524, 0.0688365027308464, 0.02422739565372467, 0.10900290310382843, 0.12225143611431122, 0.032921984791755676, -0.08114586770534515, -0.02905215136706829, 0.23725824058055878, -0.1340043991804123, -0.02249632216989994, 0.010045023635029793, -0.04497770220041275, -0.1255168467760086, 0.09214021265506744, 0.0029221633449196815, -0.04790525510907173, 0.1295047253370285, -0.037560876458883286, -0.08696291595697403, -0.07153356075286865, -0.06559024751186371, 0.05811728164553642, 0.009036807343363762, -0.037026241421699524, -0.09701520204544067, -0.07277469336986542, -0.0821596086025238, 0.03424705192446709, -0.060620177537202835, -0.024699965491890907, 0.027148352935910225, 0.0017243309412151575, -0.017263874411582947, -0.019938787445425987, 0.0811469778418541, -0.059271421283483505, 0.030003957450389862, 0.0017947970191016793, 0.03131980076432228, 0.08819631487131119, 0.047542329877614975, -0.12008833885192871, 0.07921316474676132, -0.1259407252073288, 0.0857352539896965, -0.11425978690385818, -0.01414163876324892, -0.13745026290416718, -0.009551063179969788, -0.014370380900800228, 0.03032567910850048, -0.008427876979112625, 0.09461593627929688, -0.19485987722873688, 0.0023893502075225115, 0.1404174417257309, -0.10903006792068481, -0.10805714130401611, 0.08806577324867249, -0.04406648874282837, 0.06590326875448227, 0.04763444885611534, 0.1053660660982132, 0.09200498461723328, -0.0711241066455841, -0.10365165024995804, -0.08451609313488007, -0.025270989164710045, 0.1488807499408722, 0.0756925567984581, -0.0673534944653511, 
0.12425772845745087, 0.03795362636446953, 0.0042465077713131905, -0.08274315297603607, -0.010103275999426842, -0.06195307895541191, -0.0065393103286623955, -0.07095067948102951, -0.06158490106463432, -0.005088461562991142, -0.07948119938373566, -0.0050667813047766685, -0.0822862833738327, -0.005766523070633411, 0.08925919979810715, -0.027706466615200043, 0.011701704002916813, -0.06293785572052002, 0.04794912785291672, -0.0009398619877174497, 0.014921216294169426, -0.21295182406902313, -0.10115273296833038, 0.029259221628308296, -0.14968858659267426, 0.045078471302986145, 0.04019327461719513, 0.012898559682071209, 0.047254037111997604, -0.009212816134095192, 0.02867746725678444, 0.028592020273208618, -0.011023887433111668, -0.02301384136080742, -0.15082292258739471, -0.0576082281768322, -0.08669006079435349, 0.06747127324342728, -0.14927931129932404, -0.016203729435801506, 0.06454222649335861, 0.1659315526485443, 0.021886415779590607, -0.07990945130586624, 0.07534588128328323, 0.01160153653472662, -0.04438675194978714, -0.05407804995775223, 0.0036969080101698637, -0.0259261354804039, 0.031065501272678375, 0.04503539204597473, -0.19386069476604462, -0.11181361973285675, 0.07355544716119766, 0.12051360309123993, -0.053526781499385834, -0.09497582167387009, -0.06780967116355896, -0.06419521570205688, -0.07946478575468063, -0.0571279302239418, 0.0725579559803009, 0.0846208930015564, 0.05197436362504959, -0.0703996941447258, -0.058100394904613495, 0.016117068007588387, 0.04825741425156593, -0.07466098666191101, 0.10695827752351761, 0.06818990409374237, -0.07557804137468338, 0.10345800966024399, -0.02237142063677311, 0.11067916452884674, 0.06205744296312332, 0.04383916035294533, -0.1007944718003273, 0.009347799234092236, 0.0518421046435833, 0.056351520121097565, 0.07238800823688507, -0.03768971189856529, 0.034500911831855774, 0.08349642157554626, -0.0038273269310593605, 0.03621721640229225, -0.05408884584903717, 0.03349348530173302, 0.045377686619758606, 
0.00821355078369379, 0.030233610421419144, 0.013102834112942219, -0.0019403034821152687, 0.07622872292995453, 0.033312782645225525, 0.08574783802032471, -0.02360939234495163, -0.050790272653102875, -0.09918161481618881, 0.14239206910133362, -0.07585537433624268, -0.2806420624256134, -0.15455444157123566, -0.0547027550637722, -0.042079128324985504, -0.010390106588602066, 0.07026304304599762, -0.003198356134817004, -0.1049705371260643, -0.10288567841053009, 0.051731597632169724, 0.026448221877217293, -0.12236890941858292, -0.0648583248257637, 0.06559096276760101, 0.0008262841729447246, -0.16419564187526703, 0.04005375877022743, 0.04805418476462364, -0.043798111379146576, 0.006476415786892176, 0.07343069463968277, 0.12736594676971436, 0.06685163080692291, 0.06647869199514389, -0.03442049399018288, -0.012244170531630516, 0.1820899248123169, -0.1087457612156868, 0.02434784732758999, 0.11386961489915848, -0.05844391882419586, 0.06751447916030884, 0.17352129518985748, 0.01328534446656704, -0.10426168143749237, 0.0524073988199234, 0.08368197828531265, -0.06525658816099167, -0.2492641806602478, -0.1163727194070816, -0.023873694241046906, 0.007708719465881586, 0.12051857262849808, 0.05624561756849289, 0.013495979830622673, 0.02148638851940632, -0.12297077476978302, -0.002117346040904522, -0.050468672066926956, 0.07692394405603409, 0.06056865304708481, -0.008581791073083878, 0.04883052781224251, -0.03651278465986252, 0.016418498009443283, 0.12320356070995331, 0.0440811961889267, 0.15341408550739288, -0.038849104195833206, 0.17732834815979004, 0.09037903696298599, 0.08580378443002701, -0.03628557175397873, 0.04417479783296585, -0.011898375116288662, 0.0703069418668747, -0.01790563203394413, -0.10370056331157684, -0.0605243556201458, 0.1136220321059227, 0.013279976323246956, -0.08103175461292267, 0.03022276610136032, -0.07445932924747467, 0.030390359461307526, 0.20138679444789886, -0.017359072342514992, -0.15660616755485535, -0.06607411056756973, 0.05510145425796509, 
-0.0331851951777935, -0.09108714759349823, -0.020131079480051994, 0.07942649722099304, -0.1375712901353836, 0.01574251800775528, -0.03825702145695686, 0.08028345555067062, -0.14797721803188324, -0.02066858857870102, -0.024961212649941444, 0.028880542144179344, -0.004943809937685728, 0.11270954459905624, -0.14953109622001648, 0.1243496835231781, 0.001625836594030261, 0.004988792818039656, -0.11605972796678543, 0.04410326108336449, -0.040348660200834274, -0.050034236162900925, 0.1294340342283249, -0.016549432650208473, -0.08235811442136765, -0.06007837504148483, -0.11903795599937439, -0.007834509946405888, 0.0611405111849308, -0.11348558962345123, 0.10490666329860687, 0.038792744278907776, -0.01903492957353592, -0.03208595886826515, -0.018302010372281075, -0.11579683423042297, -0.23558633029460907, 0.1125413179397583, -0.09089789539575577, 0.07120687514543533, -0.05715677887201309, -0.04615652188658714, -0.04121284931898117, 0.1544673889875412, -0.08538632094860077, -0.05771001800894737, -0.11486922949552536, 0.03299584612250328, 0.17848853766918182, -0.04615141451358795, 0.06068119406700134, -0.03909781575202942, 0.17523568868637085, -0.021663906052708626, -0.04836621508002281, -0.001397398067638278, -0.10041884332895279, -0.1798040270805359, -0.05182415619492531, 0.10873757302761078, 0.0725865289568901, 0.018397485837340355, -0.0019453963031992316, 0.03592425212264061, 0.015378070995211601, -0.07913392782211304, 0.035852596163749695, 0.11573534458875656, 0.12950415909290314, 0.035411298274993896, -0.024433039128780365, -0.09329758584499359, -0.10975957661867142, -0.10901787877082825, 0.04969417676329613, 0.16068662703037262, -0.062381573021411896, 0.16232994198799133, 0.14590439200401306, -0.09837518632411957, -0.18253682553768158, -0.07015925645828247, 0.024078257381916046, -0.014163468964397907, 0.12082505226135254, -0.2011996954679489, 0.05984383821487427, 0.06512989848852158, -0.02139441668987274, 0.09425664693117142, -0.23824848234653473, -0.1330091804265976, 
0.02412627823650837, 0.03829222545027733, -0.22653275728225708, -0.17066586017608643, -0.10671261698007584, -0.03741106390953064, -0.13438956439495087, 0.10753510147333145, 0.022255588322877884, 0.02591387927532196, -0.020223701372742653, 0.06783661246299744, 0.05864151567220688, -0.060735370963811874, 0.13111324608325958, -0.010647404007613659, 0.019366148859262466, -0.11064673960208893, -0.015719909220933914, 0.0011993402149528265, -0.0463358610868454, 0.09020698815584183, 0.03245769068598747, 0.05042405053973198, -0.09883622080087662, -0.03838833421468735, -0.05030446499586105, 0.0394948273897171, -0.059208571910858154, -0.0544704869389534, -0.0715148076415062, 0.08843252062797546, 0.08482689410448074, -0.006191398948431015, 0.04004138335585594, -0.039832331240177155, 0.03790118917822838, 0.2102324366569519, 0.11919921636581421, 0.03842124715447426, -0.1096377819776535, -0.027084480971097946, -0.012337063439190388, -0.0007029466796666384, -0.14346793293952942, 0.034043945372104645, 0.09130649268627167, 0.03519212082028389, 0.07472903281450272, -0.024332402274012566, -0.18854282796382904, 0.008248694241046906, 0.09084552526473999, -0.11212365329265594, -0.18746431171894073, 0.03513588756322861, 0.16391457617282867, -0.1567341387271881, -0.06258878111839294, 0.09823675453662872, 0.0016775375697761774, -0.03406139835715294, -0.006436218041926622, 0.07444871217012405, 0.05460416525602341, 0.10041748732328415, 0.01825304888188839, 0.04721623659133911, -0.06440688669681549, 0.09638523310422897, 0.1568172723054886, -0.1362829953432083, 0.021935999393463135, 0.03141438215970993, -0.06261670589447021, -0.07052825391292572, 0.010975360870361328, -0.014678718522191048, 0.018198417499661446, -0.04449348896741867, 0.017256904393434525, 0.0015899440040811896, 0.04707587882876396, 0.13623392581939697, 0.007704839576035738, 0.03687828406691551, 0.025633379817008972, -0.008998166769742966, -0.07142364978790283, 0.09922380745410919, 0.02738366462290287, 0.04313820227980614, 
-0.04098641499876976, 0.013787883333861828, 0.010486164130270481, -0.025484507903456688, 0.015826845541596413, -0.040086790919303894, -0.07171210646629333, 0.00012072451499989256, -0.14789803326129913, 0.0505460761487484, -0.08102212101221085, 0.004346818663179874, -0.004750867839902639, -0.025426195934414864, -0.00637881038710475, 0.006298459134995937, -0.0671476200222969, -0.044082581996917725, -0.04521281272172928, 0.1337427794933319, -0.19306007027626038, -0.01720319502055645, 0.08797592669725418, -0.06407442688941956, 0.0825444906949997, -0.009749027900397778, -0.020171113312244415, 0.0046662441454827785, -0.0717850774526596, -0.0011483331909403205, -0.020942511036992073, 0.0625179335474968, 0.014019106514751911, -0.15210023522377014, -0.014731267467141151, 0.0025979424826800823, -0.08706676959991455, -0.005186386406421661, 0.052426256239414215, -0.1511603444814682, 0.027522308751940727, 0.07423566281795502, -0.03706708177924156, -0.045604780316352844, 0.046626534312963486, 0.04300699383020401, 0.0017523322021588683, 0.09452052414417267, -0.003163046669214964, 0.04179152473807335, -0.14833146333694458, -0.045862507075071335, -0.0017968934262171388, -0.0033906050957739353, 0.026182301342487335, 0.021468129009008408, 0.032274726778268814, 0.002595606492832303, 0.22353194653987885, -0.000054372576414607465, 0.06731247901916504, 0.029719823971390724, -0.016623716801404953, -0.027728289365768433, 0.032220833003520966, 0.004281322006136179, 0.015831366181373596, 0.02446538768708706, 0.022565647959709167, -0.0187818706035614, -0.051716405898332596, -0.02522220090031624, 0.04985411465167999, 0.15629148483276367, 0.174222931265831, -0.043751608580350876, 0.07181071490049362, -0.16377700865268707, -0.04964205250144005, 0.0034522926434874535, -0.025911519303917885, 0.05613434314727783, -0.06223204359412193, 0.04246499016880989, 0.06734741479158401, -0.10012786835432053, 0.14464247226715088, -0.06593585014343262, -0.02237139642238617, -0.03105076774954796, 
-0.14587241411209106, -0.041509196162223816, 0.0034472595434635878, 0.00760698551312089, -0.100432388484478, 0.09945035725831985, 0.14367543160915375, -0.009190268814563751, 0.0010081582004204392, 0.07506965100765228, -0.05926446244120598, -0.05646505951881409, -0.023455072194337845, 0.0018067921046167612, 0.016944026574492455, -0.005088651552796364, 0.07687526941299438, 0.01261368952691555, 0.06400340050458908, 0.056947797536849976, 0.0998772382736206, 0.04658152535557747, 0.012841545976698399, -0.03641420230269432, -0.07221977412700653, -0.0038478109054267406, -0.007749981712549925, -0.04875306412577629, 0.20148023962974548, 0.05147610232234001, 0.017256958410143852, 0.010691765695810318, 0.20636749267578125, 0.004935127682983875, -0.05426396429538727, -0.13613276183605194, 0.12141993641853333, -0.009039782918989658, 0.012098358944058418, 0.015934482216835022, -0.12606213986873627, 0.027887707576155663, 0.1695059984922409, 0.10783292353153229, 0.04385373368859291, 0.004557765554636717, 0.03319936618208885, 0.023732969537377357, -0.019032027572393417, 0.03363504633307457, 0.03666974976658821, 0.21214596927165985, -0.06638990342617035, 0.04660363495349884, -0.01232327800244093, 0.0044366721995174885, -0.01790786162018776, 0.08892083168029785, -0.0348324291408062, 0.010443951934576035, -0.06187792867422104, 0.10170427709817886, -0.039263270795345306, -0.2508021891117096, -0.019794823601841927, -0.09994206577539444, -0.1292199194431305, -0.02851569466292858, 0.006093502044677734, -0.019298700615763664, 0.04430505633354187, 0.033651258796453476, -0.035686567425727844, 0.18994104862213135, 0.008397148922085762, -0.08410407602787018, -0.0645948275923729, 0.06448622047901154, -0.012089451774954796, 0.2557986080646515, -0.011221565306186676, 0.08080486208200455, 0.09509057551622391, -0.021316498517990112, -0.1467328518629074, -0.005389771889895201, 0.10111196339130402, -0.03763718903064728, 0.056088004261255264, 0.15712963044643402, -0.02957361750304699, 
0.14799822866916656, 0.04253069683909416, -0.031689174473285675, 0.06539390981197357, 0.07057232409715652, 0.04358796402812004, -0.08735419809818268, 0.07815571874380112, -0.0811677947640419, 0.13901591300964355, 0.11110106855630875, -0.027465928345918655, -0.008938513696193695, -0.04974919930100441, 0.0693865418434143, -0.021278847008943558, 0.12869150936603546, -0.011556931771337986, -0.1586514562368393, 0.03131527826189995, 0.005283316131681204, 0.04928554967045784, -0.24327294528484344, -0.06284455955028534, 0.12103688716888428, -0.04709438234567642, 0.011258138343691826, 0.08190181106328964, 0.04808232560753822, 0.007093081250786781, -0.06854404509067535, -0.09254761785268784, -0.0002788143465295434, 0.11747150123119354, -0.08690491318702698, -0.041602399200201035 ]
0da3c81dd08c7e0ea574c7e9badfb381f980cbed
# Dataset Card for Evaluation run of abacusai/MetaMath-Bagel-DPO-34B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [abacusai/MetaMath-Bagel-DPO-34B](https://huggingface.co/abacusai/MetaMath-Bagel-DPO-34B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_abacusai__MetaMath-Bagel-DPO-34B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T08:50:29.351821](https://huggingface.co/datasets/open-llm-leaderboard/details_abacusai__MetaMath-Bagel-DPO-34B/blob/main/results_2024-01-25T08-50-29.351821.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7616286037261435, "acc_stderr": 0.028274831508083145, "acc_norm": 0.7653258289687221, "acc_norm_stderr": 0.028816037077233354, "mc1": 0.48592411260709917, "mc1_stderr": 0.01749656371704279, "mc2": 0.6543983740751951, "mc2_stderr": 0.014445923537119106 }, "harness|arc:challenge|25": { "acc": 0.643344709897611, "acc_stderr": 0.013998056902620192, "acc_norm": 0.681740614334471, "acc_norm_stderr": 0.013611993916971451 }, "harness|hellaswag|10": { "acc": 0.6416052579167496, "acc_stderr": 0.004785488626807584, "acc_norm": 0.8422624975104561, "acc_norm_stderr": 0.00363749770893404 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.7407407407407407, "acc_stderr": 0.03785714465066653, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.03785714465066653 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8881578947368421, "acc_stderr": 0.02564834125169361, "acc_norm": 0.8881578947368421, "acc_norm_stderr": 0.02564834125169361 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.77, "acc_stderr": 0.04229525846816505, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.8075471698113208, "acc_stderr": 0.024262979839372274, "acc_norm": 0.8075471698113208, "acc_norm_stderr": 0.024262979839372274 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.9027777777777778, "acc_stderr": 0.024774516250440182, "acc_norm": 0.9027777777777778, "acc_norm_stderr": 0.024774516250440182 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.44, "acc_stderr": 0.0498887651569859, "acc_norm": 0.44, "acc_norm_stderr": 0.0498887651569859 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7398843930635838, "acc_stderr": 0.03345036916788991, "acc_norm": 0.7398843930635838, "acc_norm_stderr": 0.03345036916788991 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.5686274509803921, "acc_stderr": 0.04928099597287534, "acc_norm": 0.5686274509803921, "acc_norm_stderr": 0.04928099597287534 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.8, "acc_stderr": 0.04020151261036845, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.774468085106383, "acc_stderr": 0.02732107841738754, "acc_norm": 0.774468085106383, "acc_norm_stderr": 0.02732107841738754 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5789473684210527, "acc_stderr": 0.046446020912223177, "acc_norm": 0.5789473684210527, "acc_norm_stderr": 0.046446020912223177 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.7517241379310344, "acc_stderr": 0.036001056927277696, "acc_norm": 0.7517241379310344, "acc_norm_stderr": 0.036001056927277696 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.7486772486772487, "acc_stderr": 0.022340482339643898, "acc_norm": 0.7486772486772487, "acc_norm_stderr": 0.022340482339643898 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5555555555555556, "acc_stderr": 0.04444444444444449, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.04444444444444449 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.896774193548387, "acc_stderr": 0.017308381281034523, "acc_norm": 0.896774193548387, "acc_norm_stderr": 0.017308381281034523 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 
0.6945812807881774, "acc_stderr": 0.032406615658684086, "acc_norm": 0.6945812807881774, "acc_norm_stderr": 0.032406615658684086 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.77, "acc_stderr": 0.04229525846816505, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8545454545454545, "acc_stderr": 0.027530196355066584, "acc_norm": 0.8545454545454545, "acc_norm_stderr": 0.027530196355066584 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.9393939393939394, "acc_stderr": 0.01699999492742163, "acc_norm": 0.9393939393939394, "acc_norm_stderr": 0.01699999492742163 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9689119170984456, "acc_stderr": 0.012525310625527029, "acc_norm": 0.9689119170984456, "acc_norm_stderr": 0.012525310625527029 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.8051282051282052, "acc_stderr": 0.020083167595181393, "acc_norm": 0.8051282051282052, "acc_norm_stderr": 0.020083167595181393 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.45185185185185184, "acc_stderr": 0.030343862998512626, "acc_norm": 0.45185185185185184, "acc_norm_stderr": 0.030343862998512626 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.865546218487395, "acc_stderr": 0.02215937307274444, "acc_norm": 0.865546218487395, "acc_norm_stderr": 0.02215937307274444 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.5099337748344371, "acc_stderr": 0.04081677107248437, "acc_norm": 0.5099337748344371, "acc_norm_stderr": 0.04081677107248437 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9211009174311927, "acc_stderr": 0.011558198113769598, "acc_norm": 0.9211009174311927, "acc_norm_stderr": 0.011558198113769598 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6481481481481481, "acc_stderr": 0.03256850570293648, "acc_norm": 0.6481481481481481, "acc_norm_stderr": 
0.03256850570293648 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9117647058823529, "acc_stderr": 0.01990739979131694, "acc_norm": 0.9117647058823529, "acc_norm_stderr": 0.01990739979131694 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.9029535864978903, "acc_stderr": 0.01926932302564027, "acc_norm": 0.9029535864978903, "acc_norm_stderr": 0.01926932302564027 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.8071748878923767, "acc_stderr": 0.026478240960489365, "acc_norm": 0.8071748878923767, "acc_norm_stderr": 0.026478240960489365 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8625954198473282, "acc_stderr": 0.030194823996804475, "acc_norm": 0.8625954198473282, "acc_norm_stderr": 0.030194823996804475 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8760330578512396, "acc_stderr": 0.03008309871603522, "acc_norm": 0.8760330578512396, "acc_norm_stderr": 0.03008309871603522 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8981481481481481, "acc_stderr": 0.02923927267563275, "acc_norm": 0.8981481481481481, "acc_norm_stderr": 0.02923927267563275 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8588957055214724, "acc_stderr": 0.027351605518389752, "acc_norm": 0.8588957055214724, "acc_norm_stderr": 0.027351605518389752 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5089285714285714, "acc_stderr": 0.04745033255489123, "acc_norm": 0.5089285714285714, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.8932038834951457, "acc_stderr": 0.030581088928331356, "acc_norm": 0.8932038834951457, "acc_norm_stderr": 0.030581088928331356 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9401709401709402, "acc_stderr": 0.015537514263253858, "acc_norm": 0.9401709401709402, "acc_norm_stderr": 0.015537514263253858 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.89, "acc_stderr": 0.03144660377352202, "acc_norm": 0.89, "acc_norm_stderr": 0.03144660377352202 
}, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.9118773946360154, "acc_stderr": 0.010136978203312642, "acc_norm": 0.9118773946360154, "acc_norm_stderr": 0.010136978203312642 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.8294797687861272, "acc_stderr": 0.020247961569303728, "acc_norm": 0.8294797687861272, "acc_norm_stderr": 0.020247961569303728 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.8055865921787709, "acc_stderr": 0.01323580809674227, "acc_norm": 0.8055865921787709, "acc_norm_stderr": 0.01323580809674227 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8464052287581699, "acc_stderr": 0.02064559791041877, "acc_norm": 0.8464052287581699, "acc_norm_stderr": 0.02064559791041877 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.8006430868167203, "acc_stderr": 0.022691033780549656, "acc_norm": 0.8006430868167203, "acc_norm_stderr": 0.022691033780549656 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8641975308641975, "acc_stderr": 0.019061588181505405, "acc_norm": 0.8641975308641975, "acc_norm_stderr": 0.019061588181505405 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.6382978723404256, "acc_stderr": 0.028663820147199485, "acc_norm": 0.6382978723404256, "acc_norm_stderr": 0.028663820147199485 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5873533246414603, "acc_stderr": 0.01257383663379902, "acc_norm": 0.5873533246414603, "acc_norm_stderr": 0.01257383663379902 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.8345588235294118, "acc_stderr": 0.022571771025494746, "acc_norm": 0.8345588235294118, "acc_norm_stderr": 0.022571771025494746 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.8169934640522876, "acc_stderr": 0.01564306991127334, "acc_norm": 0.8169934640522876, "acc_norm_stderr": 0.01564306991127334 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7090909090909091, "acc_stderr": 0.04350271442923243, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 
0.04350271442923243 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8408163265306122, "acc_stderr": 0.023420972069166344, "acc_norm": 0.8408163265306122, "acc_norm_stderr": 0.023420972069166344 }, "harness|hendrycksTest-sociology|5": { "acc": 0.900497512437811, "acc_stderr": 0.021166216304659407, "acc_norm": 0.900497512437811, "acc_norm_stderr": 0.021166216304659407 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.91, "acc_stderr": 0.02876234912646613, "acc_norm": 0.91, "acc_norm_stderr": 0.02876234912646613 }, "harness|hendrycksTest-virology|5": { "acc": 0.5843373493975904, "acc_stderr": 0.03836722176598053, "acc_norm": 0.5843373493975904, "acc_norm_stderr": 0.03836722176598053 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8771929824561403, "acc_stderr": 0.02517298435015577, "acc_norm": 0.8771929824561403, "acc_norm_stderr": 0.02517298435015577 }, "harness|truthfulqa:mc|0": { "mc1": 0.48592411260709917, "mc1_stderr": 0.01749656371704279, "mc2": 0.6543983740751951, "mc2_stderr": 0.014445923537119106 }, "harness|winogrande|5": { "acc": 0.8224151539068666, "acc_stderr": 0.010740676861359242 }, "harness|gsm8k|5": { "acc": 0.7217589082638363, "acc_stderr": 0.01234380367142268 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_abacusai__MetaMath-Bagel-DPO-34B
[ "region:us" ]
2024-01-25T08:52:40+00:00
{"pretty_name": "Evaluation run of abacusai/MetaMath-Bagel-DPO-34B", "dataset_summary": "Dataset automatically created during the evaluation run of model [abacusai/MetaMath-Bagel-DPO-34B](https://huggingface.co/abacusai/MetaMath-Bagel-DPO-34B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_abacusai__MetaMath-Bagel-DPO-34B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T08:50:29.351821](https://huggingface.co/datasets/open-llm-leaderboard/details_abacusai__MetaMath-Bagel-DPO-34B/blob/main/results_2024-01-25T08-50-29.351821.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7616286037261435,\n \"acc_stderr\": 0.028274831508083145,\n \"acc_norm\": 0.7653258289687221,\n \"acc_norm_stderr\": 0.028816037077233354,\n \"mc1\": 0.48592411260709917,\n \"mc1_stderr\": 0.01749656371704279,\n \"mc2\": 0.6543983740751951,\n \"mc2_stderr\": 0.014445923537119106\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.643344709897611,\n \"acc_stderr\": 0.013998056902620192,\n \"acc_norm\": 0.681740614334471,\n \"acc_norm_stderr\": 0.013611993916971451\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6416052579167496,\n \"acc_stderr\": 0.004785488626807584,\n \"acc_norm\": 0.8422624975104561,\n \"acc_norm_stderr\": 0.00363749770893404\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.03785714465066653,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.03785714465066653\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.8881578947368421,\n \"acc_stderr\": 0.02564834125169361,\n \"acc_norm\": 0.8881578947368421,\n \"acc_norm_stderr\": 0.02564834125169361\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.8075471698113208,\n \"acc_stderr\": 0.024262979839372274,\n \"acc_norm\": 0.8075471698113208,\n \"acc_norm_stderr\": 0.024262979839372274\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.9027777777777778,\n \"acc_stderr\": 0.024774516250440182,\n \"acc_norm\": 0.9027777777777778,\n \"acc_norm_stderr\": 0.024774516250440182\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 
0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.0498887651569859,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.0498887651569859\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.03345036916788991,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.03345036916788991\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.5686274509803921,\n \"acc_stderr\": 0.04928099597287534,\n \"acc_norm\": 0.5686274509803921,\n \"acc_norm_stderr\": 0.04928099597287534\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.774468085106383,\n \"acc_stderr\": 0.02732107841738754,\n \"acc_norm\": 0.774468085106383,\n \"acc_norm_stderr\": 0.02732107841738754\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5789473684210527,\n \"acc_stderr\": 0.046446020912223177,\n \"acc_norm\": 0.5789473684210527,\n \"acc_norm_stderr\": 0.046446020912223177\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.7517241379310344,\n \"acc_stderr\": 0.036001056927277696,\n \"acc_norm\": 0.7517241379310344,\n \"acc_norm_stderr\": 0.036001056927277696\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.7486772486772487,\n \"acc_stderr\": 0.022340482339643898,\n \"acc_norm\": 0.7486772486772487,\n \"acc_norm_stderr\": 0.022340482339643898\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.04444444444444449,\n \"acc_norm\": 
0.5555555555555556,\n \"acc_norm_stderr\": 0.04444444444444449\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.896774193548387,\n \"acc_stderr\": 0.017308381281034523,\n \"acc_norm\": 0.896774193548387,\n \"acc_norm_stderr\": 0.017308381281034523\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.6945812807881774,\n \"acc_stderr\": 0.032406615658684086,\n \"acc_norm\": 0.6945812807881774,\n \"acc_norm_stderr\": 0.032406615658684086\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8545454545454545,\n \"acc_stderr\": 0.027530196355066584,\n \"acc_norm\": 0.8545454545454545,\n \"acc_norm_stderr\": 0.027530196355066584\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.9393939393939394,\n \"acc_stderr\": 0.01699999492742163,\n \"acc_norm\": 0.9393939393939394,\n \"acc_norm_stderr\": 0.01699999492742163\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9689119170984456,\n \"acc_stderr\": 0.012525310625527029,\n \"acc_norm\": 0.9689119170984456,\n \"acc_norm_stderr\": 0.012525310625527029\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.8051282051282052,\n \"acc_stderr\": 0.020083167595181393,\n \"acc_norm\": 0.8051282051282052,\n \"acc_norm_stderr\": 0.020083167595181393\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.45185185185185184,\n \"acc_stderr\": 0.030343862998512626,\n \"acc_norm\": 0.45185185185185184,\n \"acc_norm_stderr\": 0.030343862998512626\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 
0.865546218487395,\n \"acc_stderr\": 0.02215937307274444,\n \"acc_norm\": 0.865546218487395,\n \"acc_norm_stderr\": 0.02215937307274444\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.5099337748344371,\n \"acc_stderr\": 0.04081677107248437,\n \"acc_norm\": 0.5099337748344371,\n \"acc_norm_stderr\": 0.04081677107248437\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9211009174311927,\n \"acc_stderr\": 0.011558198113769598,\n \"acc_norm\": 0.9211009174311927,\n \"acc_norm_stderr\": 0.011558198113769598\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6481481481481481,\n \"acc_stderr\": 0.03256850570293648,\n \"acc_norm\": 0.6481481481481481,\n \"acc_norm_stderr\": 0.03256850570293648\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9117647058823529,\n \"acc_stderr\": 0.01990739979131694,\n \"acc_norm\": 0.9117647058823529,\n \"acc_norm_stderr\": 0.01990739979131694\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.9029535864978903,\n \"acc_stderr\": 0.01926932302564027,\n \"acc_norm\": 0.9029535864978903,\n \"acc_norm_stderr\": 0.01926932302564027\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.8071748878923767,\n \"acc_stderr\": 0.026478240960489365,\n \"acc_norm\": 0.8071748878923767,\n \"acc_norm_stderr\": 0.026478240960489365\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8625954198473282,\n \"acc_stderr\": 0.030194823996804475,\n \"acc_norm\": 0.8625954198473282,\n \"acc_norm_stderr\": 0.030194823996804475\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8760330578512396,\n \"acc_stderr\": 0.03008309871603522,\n \"acc_norm\": 0.8760330578512396,\n \"acc_norm_stderr\": 0.03008309871603522\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8981481481481481,\n \"acc_stderr\": 0.02923927267563275,\n \"acc_norm\": 0.8981481481481481,\n \"acc_norm_stderr\": 
0.02923927267563275\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8588957055214724,\n \"acc_stderr\": 0.027351605518389752,\n \"acc_norm\": 0.8588957055214724,\n \"acc_norm_stderr\": 0.027351605518389752\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5089285714285714,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.5089285714285714,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8932038834951457,\n \"acc_stderr\": 0.030581088928331356,\n \"acc_norm\": 0.8932038834951457,\n \"acc_norm_stderr\": 0.030581088928331356\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9401709401709402,\n \"acc_stderr\": 0.015537514263253858,\n \"acc_norm\": 0.9401709401709402,\n \"acc_norm_stderr\": 0.015537514263253858\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.89,\n \"acc_stderr\": 0.03144660377352202,\n \"acc_norm\": 0.89,\n \"acc_norm_stderr\": 0.03144660377352202\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.9118773946360154,\n \"acc_stderr\": 0.010136978203312642,\n \"acc_norm\": 0.9118773946360154,\n \"acc_norm_stderr\": 0.010136978203312642\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.8294797687861272,\n \"acc_stderr\": 0.020247961569303728,\n \"acc_norm\": 0.8294797687861272,\n \"acc_norm_stderr\": 0.020247961569303728\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.8055865921787709,\n \"acc_stderr\": 0.01323580809674227,\n \"acc_norm\": 0.8055865921787709,\n \"acc_norm_stderr\": 0.01323580809674227\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8464052287581699,\n \"acc_stderr\": 0.02064559791041877,\n \"acc_norm\": 0.8464052287581699,\n \"acc_norm_stderr\": 0.02064559791041877\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.8006430868167203,\n \"acc_stderr\": 0.022691033780549656,\n \"acc_norm\": 0.8006430868167203,\n \"acc_norm_stderr\": 
0.022691033780549656\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8641975308641975,\n \"acc_stderr\": 0.019061588181505405,\n \"acc_norm\": 0.8641975308641975,\n \"acc_norm_stderr\": 0.019061588181505405\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.6382978723404256,\n \"acc_stderr\": 0.028663820147199485,\n \"acc_norm\": 0.6382978723404256,\n \"acc_norm_stderr\": 0.028663820147199485\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5873533246414603,\n \"acc_stderr\": 0.01257383663379902,\n \"acc_norm\": 0.5873533246414603,\n \"acc_norm_stderr\": 0.01257383663379902\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.8345588235294118,\n \"acc_stderr\": 0.022571771025494746,\n \"acc_norm\": 0.8345588235294118,\n \"acc_norm_stderr\": 0.022571771025494746\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.8169934640522876,\n \"acc_stderr\": 0.01564306991127334,\n \"acc_norm\": 0.8169934640522876,\n \"acc_norm_stderr\": 0.01564306991127334\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7090909090909091,\n \"acc_stderr\": 0.04350271442923243,\n \"acc_norm\": 0.7090909090909091,\n \"acc_norm_stderr\": 0.04350271442923243\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8408163265306122,\n \"acc_stderr\": 0.023420972069166344,\n \"acc_norm\": 0.8408163265306122,\n \"acc_norm_stderr\": 0.023420972069166344\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.900497512437811,\n \"acc_stderr\": 0.021166216304659407,\n \"acc_norm\": 0.900497512437811,\n \"acc_norm_stderr\": 0.021166216304659407\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.91,\n \"acc_stderr\": 0.02876234912646613,\n \"acc_norm\": 0.91,\n \"acc_norm_stderr\": 0.02876234912646613\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5843373493975904,\n \"acc_stderr\": 0.03836722176598053,\n \"acc_norm\": 0.5843373493975904,\n 
\"acc_norm_stderr\": 0.03836722176598053\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8771929824561403,\n \"acc_stderr\": 0.02517298435015577,\n \"acc_norm\": 0.8771929824561403,\n \"acc_norm_stderr\": 0.02517298435015577\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.48592411260709917,\n \"mc1_stderr\": 0.01749656371704279,\n \"mc2\": 0.6543983740751951,\n \"mc2_stderr\": 0.014445923537119106\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8224151539068666,\n \"acc_stderr\": 0.010740676861359242\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7217589082638363,\n \"acc_stderr\": 0.01234380367142268\n }\n}\n```", "repo_url": "https://huggingface.co/abacusai/MetaMath-Bagel-DPO-34B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|arc:challenge|25_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|gsm8k|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hellaswag|10_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T08-50-29.351821.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T08-50-29.351821.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T08-50-29.351821.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T08-50-29.351821.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T08-50-29.351821.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T08-50-29.351821.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T08-50-29.351821.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T08-50-29.351821.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["**/details_harness|winogrande|5_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T08-50-29.351821.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T08_50_29.351821", "path": ["results_2024-01-25T08-50-29.351821.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T08-50-29.351821.parquet"]}]}]}
2024-01-25T08:53:02+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of abacusai/MetaMath-Bagel-DPO-34B Dataset automatically created during the evaluation run of model abacusai/MetaMath-Bagel-DPO-34B on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T08:50:29.351821(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of abacusai/MetaMath-Bagel-DPO-34B\n\n\n\nDataset automatically created during the evaluation run of model abacusai/MetaMath-Bagel-DPO-34B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T08:50:29.351821(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of abacusai/MetaMath-Bagel-DPO-34B\n\n\n\nDataset automatically created during the evaluation run of model abacusai/MetaMath-Bagel-DPO-34B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T08:50:29.351821(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 191, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of abacusai/MetaMath-Bagel-DPO-34B\n\n\n\nDataset automatically created during the evaluation run of model abacusai/MetaMath-Bagel-DPO-34B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T08:50:29.351821(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
[ -0.047877296805381775, 0.2202860563993454, -0.004263308830559254, 0.03283722698688507, 0.08736647665500641, -0.012114109471440315, 0.03061973676085472, 0.10854664444923401, -0.02021857164800167, 0.17830969393253326, -0.021901991218328476, 0.09879323840141296, 0.07923556864261627, 0.13696321845054626, 0.014666651375591755, -0.13927806913852692, 0.018222849816083908, -0.07749171555042267, 0.06883262097835541, 0.07694046199321747, 0.0908871665596962, -0.08148426562547684, 0.056564442813396454, -0.045391567051410675, 0.0010864674113690853, -0.0064338878728449345, -0.09733385592699051, -0.04824481159448624, 0.08329388499259949, 0.09168992191553116, 0.045100126415491104, -0.011987033300101757, 0.012692893855273724, -0.25432088971138, 0.01446653250604868, 0.09075577557086945, 0.003131725825369358, 0.03577112406492233, 0.11799750477075577, -0.0789049044251442, 0.04099994897842407, -0.07131756097078323, 0.0720931738615036, 0.045450326055288315, -0.11942435055971146, -0.14161176979541779, -0.1454506814479828, 0.013552606105804443, 0.06486661732196808, 0.04558785259723663, -0.02388218604028225, 0.1457190066576004, -0.028641002252697945, 0.04123811796307564, 0.13055013120174408, -0.10079176723957062, -0.02125648781657219, 0.06580755114555359, 0.024725131690502167, 0.06635922193527222, -0.08464838564395905, -0.012709660455584526, 0.03594203293323517, 0.04908447340130806, 0.02085714600980282, 0.00885392352938652, -0.02638215944170952, 0.015643814578652382, -0.1460971236228943, -0.12283019721508026, 0.17887631058692932, 0.010639435611665249, -0.043748654425144196, -0.19415414333343506, -0.015629008412361145, 0.019853847101330757, -0.0025076321326196194, -0.054973457008600235, 0.013162619434297085, -0.019684910774230957, 0.08629424124956131, -0.013854988850653172, -0.09485319256782532, -0.012277979403734207, 0.004037744831293821, 0.04837926849722862, 0.018921254202723503, -0.025473106652498245, -0.005659657996147871, 0.10992579162120819, -0.01500690821558237, 
-0.09079429507255554, -0.0697869136929512, -0.05837377533316612, -0.08901455253362656, -0.05190163850784302, 0.009510591626167297, -0.0848495364189148, 0.036075394600629807, 0.22896355390548706, -0.022107703611254692, 0.01757143810391426, -0.1097041592001915, 0.0071187191642820835, 0.13099132478237152, 0.05869890749454498, -0.07550262659788132, -0.062091998755931854, -0.018870653584599495, 0.022207528352737427, 0.0369255356490612, -0.015409254468977451, 0.01876119337975979, 0.07211656868457794, 0.047999873757362366, 0.11758220940828323, 0.1290108561515808, 0.03256065025925636, -0.06995167583227158, -0.03565336391329765, 0.23027728497982025, -0.15303738415241241, -0.023951470851898193, -0.0015042234444990754, -0.0416850671172142, -0.11112180352210999, 0.08748994767665863, 0.00964002963155508, -0.041550759226083755, 0.12908242642879486, -0.04760797321796417, -0.0860246941447258, -0.0728788673877716, -0.049049217253923416, 0.06281391531229019, 0.004424673039466143, -0.03156469762325287, -0.0840601921081543, -0.07435017079114914, -0.07911000400781631, 0.022180402651429176, -0.06362999230623245, -0.031371716409921646, 0.03031829372048378, -0.007493229582905769, -0.018916886299848557, -0.014893216080963612, 0.09810248762369156, -0.05055893585085869, 0.03356312960386276, 0.014076200313866138, 0.01492922194302082, 0.08202967792749405, 0.05591193214058876, -0.12433761358261108, 0.08781172335147858, -0.15270476043224335, 0.09286526590585709, -0.11106535792350769, -0.005681266076862812, -0.14454303681850433, -0.004062228370457888, -0.01851007342338562, 0.012885130941867828, -0.0064710951410233974, 0.10011731833219528, -0.21275492012500763, 0.015096766874194145, 0.13797013461589813, -0.10078198462724686, -0.11392424255609512, 0.06608034670352936, -0.049890704452991486, 0.08256454765796661, 0.05652875825762749, 0.11237149685621262, 0.10475680232048035, -0.07538460940122604, -0.11780650913715363, -0.08977831900119781, -0.02615421451628208, 0.1542026549577713, 0.0720110684633255, 
-0.07743054628372192, 0.1286991983652115, 0.04192426800727844, -0.008902379311621189, -0.09400971233844757, -0.006097806617617607, -0.06804916262626648, -0.013979852199554443, -0.06211176887154579, -0.07613854110240936, -0.0023335982114076614, -0.08342765271663666, -0.015528988093137741, -0.08200255781412125, 0.0196211040019989, 0.08438294380903244, -0.02391616627573967, 0.01031848881393671, -0.057124871760606766, 0.04156578704714775, 0.0013500285567715764, 0.021862616762518883, -0.21365436911582947, -0.11240395903587341, 0.033055759966373444, -0.14556729793548584, 0.05294765159487724, 0.023499902337789536, 0.01165293250232935, 0.03463092818856239, -0.014732416719198227, 0.033051781356334686, 0.0142070846632123, 0.0015500338049605489, -0.014480767771601677, -0.13835155963897705, -0.04800787568092346, -0.08292783051729202, 0.10119672119617462, -0.12272225320339203, -0.015602302737534046, 0.07163139432668686, 0.1669212281703949, 0.007582590915262699, -0.08340921252965927, 0.08270380645990372, -0.0020762288477271795, -0.02958785369992256, -0.06291508674621582, 0.006511525716632605, -0.02233228087425232, 0.03484463691711426, 0.03495834767818451, -0.20706744492053986, -0.1457904875278473, 0.07837333530187607, 0.13338065147399902, -0.07496973127126694, -0.09696924686431885, -0.06953420490026474, -0.0664781853556633, -0.07900601625442505, -0.06883920729160309, 0.06610571593046188, 0.0695841982960701, 0.03745085000991821, -0.0707608237862587, -0.06596691161394119, 0.011058849282562733, 0.053086794912815094, -0.08410506695508957, 0.09160465747117996, 0.057534776628017426, -0.09420894086360931, 0.10733863711357117, 0.003496799385175109, 0.13580471277236938, 0.06659277528524399, 0.02759614959359169, -0.09858597815036774, -0.0008556455140933394, 0.052295077592134476, 0.042560067027807236, 0.07102274894714355, -0.03592166677117348, 0.037697676569223404, 0.08142097294330597, -0.0029166575986891985, 0.041547033935785294, -0.04265022650361061, 0.03059852309525013, 
0.048344649374485016, 0.007488420698791742, 0.03492293879389763, 0.009593285620212555, 0.003091061022132635, 0.06763753294944763, 0.044796183705329895, 0.09767071157693863, -0.012669670395553112, -0.04254526272416115, -0.09100309014320374, 0.13057996332645416, -0.08286867290735245, -0.2776547968387604, -0.14161346852779388, -0.05372440814971924, -0.046334538608789444, -0.012888099998235703, 0.06748629361391068, -0.004384845960885286, -0.0932564064860344, -0.09649646282196045, 0.046270325779914856, 0.006541747599840164, -0.1231844574213028, -0.04515785351395607, 0.05886101722717285, 0.010439688339829445, -0.16755738854408264, 0.04063766449689865, 0.051050815731287, -0.040086910128593445, -0.0032129124738276005, 0.08555787056684494, 0.13999773561954498, 0.07856865972280502, 0.05806812271475792, -0.02728390321135521, -0.009725636802613735, 0.18553021550178528, -0.10930642485618591, 0.027768997475504875, 0.11880651861429214, -0.04911242052912712, 0.06674511730670929, 0.18214528262615204, 0.0076056732796132565, -0.10581523180007935, 0.05664410442113876, 0.09270290285348892, -0.0662718191742897, -0.24782787263393402, -0.109921894967556, -0.013921515084803104, 0.0036922539584338665, 0.1044732928276062, 0.05700216069817543, 0.025733260437846184, 0.019698401913046837, -0.11553186178207397, -0.020572291687130928, -0.05628203973174095, 0.07970017194747925, 0.054536934942007065, -0.006315796170383692, 0.049063581973314285, -0.03809000551700592, 0.014097701758146286, 0.11608725041151047, 0.04279382899403572, 0.14388205111026764, -0.029478387907147408, 0.15741047263145447, 0.09174113720655441, 0.08325820416212082, -0.04802586883306503, 0.03912151977419853, 0.008626827970147133, 0.07074964791536331, -0.018847428262233734, -0.10931426286697388, -0.047752317041158676, 0.09804211556911469, 0.014771290123462677, -0.07651301473379135, 0.017070306465029716, -0.0696912482380867, 0.03650030493736267, 0.20246727764606476, -0.022879937663674355, -0.15130436420440674, -0.05592726916074753, 
0.060779597610235214, -0.023892713710665703, -0.08298665285110474, -0.03112831525504589, 0.04508743807673454, -0.1460142582654953, 0.03538160026073456, -0.02901899628341198, 0.07915906608104706, -0.13398493826389313, -0.01423082035034895, -0.023927200585603714, 0.030816445127129555, 0.0017044016858562827, 0.11858394742012024, -0.14481864869594574, 0.11305905878543854, 0.01053437776863575, 0.011614672839641571, -0.11305475234985352, 0.046814434230327606, -0.049440156668424606, -0.05272366851568222, 0.14399245381355286, -0.021919602528214455, -0.07444319128990173, -0.05029464140534401, -0.1234702318906784, -0.00839289091527462, 0.07774020731449127, -0.13116876780986786, 0.1046503409743309, 0.03846977651119232, -0.01664847880601883, -0.02434075064957142, 0.00771746039390564, -0.13383206725120544, -0.2264566570520401, 0.1162017360329628, -0.09650588780641556, 0.07159247994422913, -0.051385071128606796, -0.04065227136015892, -0.051998499780893326, 0.18259242177009583, -0.06936682760715485, -0.05372127890586853, -0.1193941980600357, 0.034667398780584335, 0.16615203022956848, -0.04526999965310097, 0.05348317325115204, -0.047715894877910614, 0.17272840440273285, -0.008985861204564571, -0.05103713274002075, -0.000988881103694439, -0.08594644069671631, -0.15486560761928558, -0.04288392886519432, 0.12844441831111908, 0.06603731960058212, 0.01345206517726183, 0.006398136727511883, 0.04053318873047829, 0.026065610349178314, -0.08351936936378479, 0.030969930812716484, 0.11296883225440979, 0.14470255374908447, 0.03179185092449188, -0.03315200284123421, -0.08338087797164917, -0.11276803910732269, -0.09971408545970917, 0.06522508710622787, 0.14418545365333557, -0.06491933763027191, 0.1593918800354004, 0.1262035220861435, -0.0976136177778244, -0.19457228481769562, -0.057459406554698944, 0.023607948794960976, -0.024161921814084053, 0.12140876799821854, -0.19106552004814148, 0.07394129782915115, 0.06137331575155258, -0.017272232100367546, 0.07397158443927765, -0.23729079961776733, 
-0.1386440247297287, 0.027460677549242973, 0.026438070461153984, -0.23747891187667847, -0.17965973913669586, -0.10637930035591125, -0.03854186832904816, -0.14889824390411377, 0.13087788224220276, -0.012747167609632015, 0.028199439868330956, -0.009214748628437519, 0.06384661048650742, 0.05687471479177475, -0.0648229569196701, 0.1366804838180542, 0.0101045873016119, 0.015027450397610664, -0.11323490738868713, -0.013162313960492611, 0.0179251991212368, -0.05041796341538429, 0.10455930233001709, 0.05177704989910126, 0.05394583195447922, -0.08026037365198135, -0.0404619462788105, -0.04967109113931656, 0.0454125814139843, -0.06618396192789078, -0.06547816842794418, -0.07397887855768204, 0.08717337995767593, 0.08411699533462524, -0.0177387073636055, 0.03167473152279854, -0.03770586848258972, 0.04491230472922325, 0.20835214853286743, 0.11979799717664719, 0.04197676107287407, -0.118686743080616, -0.02397880144417286, -0.01259942539036274, -0.010139459744095802, -0.1470540314912796, 0.038030099123716354, 0.08338488638401031, 0.04290717467665672, 0.07351066917181015, -0.030380085110664368, -0.1949070245027542, -0.005940596107393503, 0.08642932772636414, -0.11018480360507965, -0.21033073961734772, 0.029969656839966774, 0.144230917096138, -0.16801108419895172, -0.058581236749887466, 0.09470226615667343, 0.017141887918114662, -0.03285886347293854, -0.0008524375734850764, 0.0767090693116188, 0.04196668043732643, 0.09636770188808441, 0.01042838767170906, 0.04856972023844719, -0.06906230002641678, 0.11548537760972977, 0.16628248989582062, -0.1304139792919159, 0.028186656534671783, 0.03923215717077255, -0.05143963545560837, -0.060309458523988724, 0.01908285729587078, -0.0053983451798558235, 0.02220568247139454, -0.03938539698719978, 0.023629970848560333, 0.0011514009675011039, 0.04007796570658684, 0.1135491207242012, 0.010420926846563816, 0.043653689324855804, 0.032363228499889374, -0.011178970336914062, -0.08396925032138824, 0.09239661693572998, 0.02865775302052498, 
0.040104735642671585, -0.03834519162774086, 0.01914750225841999, 0.018512317910790443, -0.0029632109217345715, 0.012923717498779297, -0.04073940962553024, -0.058830197900533676, -0.005966415628790855, -0.13670040667057037, 0.028554076328873634, -0.0784464031457901, -0.0028951847925782204, -0.01698707416653633, -0.013629047200083733, -0.010967819951474667, 0.008191187866032124, -0.05386148393154144, -0.06411101669073105, -0.0447712279856205, 0.13113181293010712, -0.20169168710708618, -0.011874692514538765, 0.09051856398582458, -0.05717635899782181, 0.07534332573413849, -0.002075888216495514, -0.0140489237383008, 0.0014780799392610788, -0.0665687695145607, -0.013414611108601093, -0.018012328073382378, 0.04560195282101631, 0.014984598383307457, -0.17250452935695648, -0.022827645763754845, 0.010161776095628738, -0.08627723157405853, -0.00007324429316213354, 0.055907320231199265, -0.15887729823589325, 0.0077449665404856205, 0.06867582350969315, -0.03245293349027634, -0.0489560104906559, 0.03570129722356796, 0.05517496541142464, 0.004736269824206829, 0.08752794563770294, -0.0021305223926901817, 0.04238639771938324, -0.15545323491096497, -0.05390666052699089, -0.015371481887996197, 0.0026543913409113884, 0.031038455665111542, 0.03312398120760918, 0.03386376425623894, 0.00044451392022892833, 0.21455733478069305, -0.008960328996181488, 0.0789061039686203, 0.039598248898983, -0.002040123799815774, -0.017777835950255394, 0.02795351855456829, 0.027530768886208534, 0.012598891742527485, 0.02361282706260681, 0.04208292439579964, -0.009640965610742569, -0.04146086052060127, -0.02936684899032116, 0.05739254876971245, 0.17120711505413055, 0.1664271056652069, -0.05361247435212135, 0.07639587670564651, -0.16107304394245148, -0.0511004738509655, 0.028921030461788177, -0.009740956127643585, 0.04185669124126434, -0.07108879834413528, 0.01925240270793438, 0.06894326210021973, -0.10230059176683426, 0.1457318216562271, -0.07247344404459, -0.03443469479680061, -0.031361937522888184, 
-0.1349991112947464, -0.04225916415452957, 0.015685241669416428, 0.006872927770018578, -0.09842399507761002, 0.09593553841114044, 0.132234126329422, -0.020315922796726227, -0.014745676890015602, 0.10918616503477097, -0.08480324596166611, -0.06236395984888077, -0.02441684529185295, 0.006300705950707197, 0.014898066408932209, -0.011121521703898907, 0.08397281169891357, 0.010497486218810081, 0.08927054703235626, 0.062455225735902786, 0.09189391881227493, 0.05076413229107857, 0.010808466002345085, -0.05038648843765259, -0.07005131244659424, -0.004774028435349464, -0.002814385574311018, -0.04417940601706505, 0.20639076828956604, 0.045864276587963104, 0.018070874735713005, 0.006285803858190775, 0.2099105268716812, 0.010408108122646809, -0.0658067911863327, -0.13629542291164398, 0.09979607164859772, -0.0022535379976034164, 0.018188899382948875, 0.024123162031173706, -0.1403840035200119, 0.022973891347646713, 0.15779142081737518, 0.11933647841215134, 0.03583287075161934, 0.011521567590534687, 0.022988367825746536, 0.027246667072176933, -0.027717720717191696, 0.032411012798547745, 0.04824897646903992, 0.17456571757793427, -0.05861737206578255, 0.05786146968603134, -0.0126419086009264, -0.009010974317789078, -0.02610250748693943, 0.10024847090244293, -0.035439230501651764, 0.0158956591039896, -0.0446820929646492, 0.11339101195335388, -0.029893141239881516, -0.25687795877456665, -0.043409667909145355, -0.10451117157936096, -0.13300944864749908, -0.02451058104634285, 0.006748552434146404, -0.03131329268217087, 0.038604769855737686, 0.04515806585550308, -0.022303292527794838, 0.19793473184108734, 0.017468901351094246, -0.08197657018899918, -0.05718551203608513, 0.06817135214805603, -0.010333554819226265, 0.25667470693588257, -0.005666916724294424, 0.06044291704893112, 0.09789865463972092, -0.019934194162487984, -0.15146757662296295, 0.00417430279776454, 0.1109207421541214, -0.027902834117412567, 0.06194361671805382, 0.16621318459510803, -0.03416813910007477, 
0.12383290380239487, 0.04929035156965256, -0.04551345109939575, 0.05598107725381851, 0.06926393508911133, 0.04996917396783829, -0.1056545302271843, 0.08789500594139099, -0.09200632572174072, 0.14164815843105316, 0.11537106335163116, -0.03627268597483635, 0.0020422525703907013, -0.06177213415503502, 0.06468260288238525, -0.023015804588794708, 0.11011473834514618, -0.012132067233324051, -0.1580202281475067, 0.028094712644815445, 0.010847442783415318, 0.05302313342690468, -0.2361885905265808, -0.07013897597789764, 0.13704244792461395, -0.043278343975543976, 0.0075674355030059814, 0.08277411013841629, 0.037573885172605515, -0.0030254845041781664, -0.0720299482345581, -0.09105906635522842, -0.0076570105738937855, 0.11996671557426453, -0.09965169429779053, -0.041075699031353 ]
521816ff31066d9821a18c3ec9331c65ffe8c370
# Dataset Card for Evaluation run of jsfs11/SnorkelWestBeagle-DARETIES-7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [jsfs11/SnorkelWestBeagle-DARETIES-7B](https://huggingface.co/jsfs11/SnorkelWestBeagle-DARETIES-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jsfs11__SnorkelWestBeagle-DARETIES-7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T09:47:01.298299](https://huggingface.co/datasets/open-llm-leaderboard/details_jsfs11__SnorkelWestBeagle-DARETIES-7B/blob/main/results_2024-01-25T09-47-01.298299.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6479799815457161, "acc_stderr": 0.032161535797548865, "acc_norm": 0.6485286656946667, "acc_norm_stderr": 0.03282087535821274, "mc1": 0.5630354957160343, "mc1_stderr": 0.017363844503195953, "mc2": 0.7005107732516146, "mc2_stderr": 0.014999534657573073 }, "harness|arc:challenge|25": { "acc": 0.6843003412969283, "acc_stderr": 0.01358257109581529, "acc_norm": 0.71160409556314, "acc_norm_stderr": 0.013238394422428173 }, "harness|hellaswag|10": { "acc": 0.711611232822147, "acc_stderr": 0.004520870679457037, "acc_norm": 0.8735311690898228, "acc_norm_stderr": 0.0033169770861701505 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6296296296296297, "acc_stderr": 0.041716541613545426, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.041716541613545426 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6973684210526315, "acc_stderr": 0.03738520676119669, "acc_norm": 0.6973684210526315, "acc_norm_stderr": 0.03738520676119669 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.59, "acc_stderr": 0.049431107042371025, "acc_norm": 0.59, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.720754716981132, "acc_stderr": 0.027611163402399715, "acc_norm": 0.720754716981132, "acc_norm_stderr": 0.027611163402399715 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 
0.050161355804659205 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.26, "acc_stderr": 0.04408440022768077, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768077 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.8, "acc_stderr": 0.04020151261036845, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5787234042553191, "acc_stderr": 0.03227834510146267, "acc_norm": 0.5787234042553191, "acc_norm_stderr": 0.03227834510146267 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5586206896551724, "acc_stderr": 0.04137931034482757, "acc_norm": 0.5586206896551724, "acc_norm_stderr": 0.04137931034482757 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3994708994708995, "acc_stderr": 0.02522545028406788, "acc_norm": 0.3994708994708995, "acc_norm_stderr": 0.02522545028406788 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.48412698412698413, "acc_stderr": 0.04469881854072606, "acc_norm": 0.48412698412698413, "acc_norm_stderr": 0.04469881854072606 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7709677419354839, "acc_stderr": 0.023904914311782655, "acc_norm": 0.7709677419354839, "acc_norm_stderr": 0.023904914311782655 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4975369458128079, "acc_stderr": 0.03517945038691063, "acc_norm": 0.4975369458128079, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7828282828282829, "acc_stderr": 0.029376616484945633, "acc_norm": 0.7828282828282829, "acc_norm_stderr": 0.029376616484945633 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.021500249576033477, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.021500249576033477 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6641025641025641, "acc_stderr": 0.023946724741563973, "acc_norm": 0.6641025641025641, "acc_norm_stderr": 0.023946724741563973 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37407407407407406, "acc_stderr": 0.02950286112895529, "acc_norm": 0.37407407407407406, "acc_norm_stderr": 0.02950286112895529 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.680672268907563, "acc_stderr": 0.030283995525884396, "acc_norm": 0.680672268907563, "acc_norm_stderr": 0.030283995525884396 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8275229357798165, "acc_stderr": 0.016197807956848043, "acc_norm": 0.8275229357798165, "acc_norm_stderr": 0.016197807956848043 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5277777777777778, "acc_stderr": 
0.0340470532865388, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8235294117647058, "acc_stderr": 0.026756401538078966, "acc_norm": 0.8235294117647058, "acc_norm_stderr": 0.026756401538078966 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8016877637130801, "acc_stderr": 0.02595502084162113, "acc_norm": 0.8016877637130801, "acc_norm_stderr": 0.02595502084162113 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.672645739910314, "acc_stderr": 0.03149384670994131, "acc_norm": 0.672645739910314, "acc_norm_stderr": 0.03149384670994131 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7480916030534351, "acc_stderr": 0.03807387116306085, "acc_norm": 0.7480916030534351, "acc_norm_stderr": 0.03807387116306085 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7603305785123967, "acc_stderr": 0.03896878985070417, "acc_norm": 0.7603305785123967, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7129629629629629, "acc_stderr": 0.043733130409147614, "acc_norm": 0.7129629629629629, "acc_norm_stderr": 0.043733130409147614 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.754601226993865, "acc_stderr": 0.03380939813943354, "acc_norm": 0.754601226993865, "acc_norm_stderr": 0.03380939813943354 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5, "acc_stderr": 0.04745789978762494, "acc_norm": 0.5, "acc_norm_stderr": 0.04745789978762494 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.04245022486384495, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.04245022486384495 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8974358974358975, "acc_stderr": 0.01987565502786744, "acc_norm": 0.8974358974358975, "acc_norm_stderr": 0.01987565502786744 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, 
"acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8237547892720306, "acc_stderr": 0.013625556907993464, "acc_norm": 0.8237547892720306, "acc_norm_stderr": 0.013625556907993464 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7023121387283237, "acc_stderr": 0.024617055388677006, "acc_norm": 0.7023121387283237, "acc_norm_stderr": 0.024617055388677006 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.44581005586592176, "acc_stderr": 0.01662399851333311, "acc_norm": 0.44581005586592176, "acc_norm_stderr": 0.01662399851333311 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7418300653594772, "acc_stderr": 0.025058503316958143, "acc_norm": 0.7418300653594772, "acc_norm_stderr": 0.025058503316958143 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7106109324758842, "acc_stderr": 0.025755865922632945, "acc_norm": 0.7106109324758842, "acc_norm_stderr": 0.025755865922632945 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7283950617283951, "acc_stderr": 0.024748624490537368, "acc_norm": 0.7283950617283951, "acc_norm_stderr": 0.024748624490537368 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4716312056737589, "acc_stderr": 0.029779450957303062, "acc_norm": 0.4716312056737589, "acc_norm_stderr": 0.029779450957303062 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46936114732724904, "acc_stderr": 0.012746237711716634, "acc_norm": 0.46936114732724904, "acc_norm_stderr": 0.012746237711716634 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6727941176470589, "acc_stderr": 0.028501452860396553, "acc_norm": 0.6727941176470589, "acc_norm_stderr": 0.028501452860396553 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6437908496732027, "acc_stderr": 0.0193733324207245, "acc_norm": 0.6437908496732027, "acc_norm_stderr": 0.0193733324207245 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7181818181818181, "acc_stderr": 0.043091187099464585, "acc_norm": 
0.7181818181818181, "acc_norm_stderr": 0.043091187099464585 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.726530612244898, "acc_stderr": 0.028535560337128445, "acc_norm": 0.726530612244898, "acc_norm_stderr": 0.028535560337128445 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8656716417910447, "acc_stderr": 0.024112678240900808, "acc_norm": 0.8656716417910447, "acc_norm_stderr": 0.024112678240900808 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.88, "acc_stderr": 0.03265986323710906, "acc_norm": 0.88, "acc_norm_stderr": 0.03265986323710906 }, "harness|hendrycksTest-virology|5": { "acc": 0.5060240963855421, "acc_stderr": 0.03892212195333045, "acc_norm": 0.5060240963855421, "acc_norm_stderr": 0.03892212195333045 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.5630354957160343, "mc1_stderr": 0.017363844503195953, "mc2": 0.7005107732516146, "mc2_stderr": 0.014999534657573073 }, "harness|winogrande|5": { "acc": 0.8318863456985004, "acc_stderr": 0.010510336954166742 }, "harness|gsm8k|5": { "acc": 0.6209249431387415, "acc_stderr": 0.01336363029508836 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_jsfs11__SnorkelWestBeagle-DARETIES-7B
[ "region:us" ]
2024-01-25T09:49:19+00:00
{"pretty_name": "Evaluation run of jsfs11/SnorkelWestBeagle-DARETIES-7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [jsfs11/SnorkelWestBeagle-DARETIES-7B](https://huggingface.co/jsfs11/SnorkelWestBeagle-DARETIES-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jsfs11__SnorkelWestBeagle-DARETIES-7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T09:47:01.298299](https://huggingface.co/datasets/open-llm-leaderboard/details_jsfs11__SnorkelWestBeagle-DARETIES-7B/blob/main/results_2024-01-25T09-47-01.298299.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6479799815457161,\n \"acc_stderr\": 0.032161535797548865,\n \"acc_norm\": 0.6485286656946667,\n \"acc_norm_stderr\": 0.03282087535821274,\n \"mc1\": 0.5630354957160343,\n \"mc1_stderr\": 0.017363844503195953,\n \"mc2\": 0.7005107732516146,\n \"mc2_stderr\": 0.014999534657573073\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6843003412969283,\n \"acc_stderr\": 0.01358257109581529,\n \"acc_norm\": 0.71160409556314,\n \"acc_norm_stderr\": 0.013238394422428173\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.711611232822147,\n \"acc_stderr\": 0.004520870679457037,\n \"acc_norm\": 0.8735311690898228,\n \"acc_norm_stderr\": 0.0033169770861701505\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6296296296296297,\n \"acc_stderr\": 0.041716541613545426,\n \"acc_norm\": 0.6296296296296297,\n \"acc_norm_stderr\": 0.041716541613545426\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.03738520676119669,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.03738520676119669\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.720754716981132,\n \"acc_stderr\": 0.027611163402399715,\n \"acc_norm\": 0.720754716981132,\n \"acc_norm_stderr\": 0.027611163402399715\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n 
\"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768077,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768077\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5787234042553191,\n \"acc_stderr\": 0.03227834510146267,\n \"acc_norm\": 0.5787234042553191,\n \"acc_norm_stderr\": 0.03227834510146267\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482757,\n \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482757\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3994708994708995,\n \"acc_stderr\": 0.02522545028406788,\n \"acc_norm\": 0.3994708994708995,\n \"acc_norm_stderr\": 0.02522545028406788\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.48412698412698413,\n \"acc_stderr\": 
0.04469881854072606,\n \"acc_norm\": 0.48412698412698413,\n \"acc_norm_stderr\": 0.04469881854072606\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7709677419354839,\n \"acc_stderr\": 0.023904914311782655,\n \"acc_norm\": 0.7709677419354839,\n \"acc_norm_stderr\": 0.023904914311782655\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4975369458128079,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.029376616484945633,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.029376616484945633\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033477,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.021500249576033477\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6641025641025641,\n \"acc_stderr\": 0.023946724741563973,\n \"acc_norm\": 0.6641025641025641,\n \"acc_norm_stderr\": 0.023946724741563973\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37407407407407406,\n \"acc_stderr\": 0.02950286112895529,\n \"acc_norm\": 0.37407407407407406,\n \"acc_norm_stderr\": 0.02950286112895529\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.680672268907563,\n \"acc_stderr\": 0.030283995525884396,\n \"acc_norm\": 0.680672268907563,\n \"acc_norm_stderr\": 0.030283995525884396\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8275229357798165,\n \"acc_stderr\": 0.016197807956848043,\n \"acc_norm\": 0.8275229357798165,\n \"acc_norm_stderr\": 0.016197807956848043\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5277777777777778,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\": 0.5277777777777778,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8235294117647058,\n \"acc_stderr\": 0.026756401538078966,\n \"acc_norm\": 0.8235294117647058,\n \"acc_norm_stderr\": 0.026756401538078966\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8016877637130801,\n \"acc_stderr\": 0.02595502084162113,\n \"acc_norm\": 0.8016877637130801,\n \"acc_norm_stderr\": 0.02595502084162113\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.672645739910314,\n \"acc_stderr\": 0.03149384670994131,\n \"acc_norm\": 0.672645739910314,\n \"acc_norm_stderr\": 0.03149384670994131\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7480916030534351,\n \"acc_stderr\": 0.03807387116306085,\n \"acc_norm\": 0.7480916030534351,\n \"acc_norm_stderr\": 0.03807387116306085\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7603305785123967,\n \"acc_stderr\": 0.03896878985070417,\n \"acc_norm\": 0.7603305785123967,\n \"acc_norm_stderr\": 0.03896878985070417\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7129629629629629,\n \"acc_stderr\": 0.043733130409147614,\n 
\"acc_norm\": 0.7129629629629629,\n \"acc_norm_stderr\": 0.043733130409147614\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.754601226993865,\n \"acc_stderr\": 0.03380939813943354,\n \"acc_norm\": 0.754601226993865,\n \"acc_norm_stderr\": 0.03380939813943354\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04745789978762494,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04745789978762494\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8974358974358975,\n \"acc_stderr\": 0.01987565502786744,\n \"acc_norm\": 0.8974358974358975,\n \"acc_norm_stderr\": 0.01987565502786744\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8237547892720306,\n \"acc_stderr\": 0.013625556907993464,\n \"acc_norm\": 0.8237547892720306,\n \"acc_norm_stderr\": 0.013625556907993464\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7023121387283237,\n \"acc_stderr\": 0.024617055388677006,\n \"acc_norm\": 0.7023121387283237,\n \"acc_norm_stderr\": 0.024617055388677006\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.44581005586592176,\n \"acc_stderr\": 0.01662399851333311,\n \"acc_norm\": 0.44581005586592176,\n \"acc_norm_stderr\": 0.01662399851333311\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7418300653594772,\n \"acc_stderr\": 0.025058503316958143,\n \"acc_norm\": 0.7418300653594772,\n \"acc_norm_stderr\": 0.025058503316958143\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n \"acc_stderr\": 0.025755865922632945,\n \"acc_norm\": 0.7106109324758842,\n 
\"acc_norm_stderr\": 0.025755865922632945\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7283950617283951,\n \"acc_stderr\": 0.024748624490537368,\n \"acc_norm\": 0.7283950617283951,\n \"acc_norm_stderr\": 0.024748624490537368\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4716312056737589,\n \"acc_stderr\": 0.029779450957303062,\n \"acc_norm\": 0.4716312056737589,\n \"acc_norm_stderr\": 0.029779450957303062\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46936114732724904,\n \"acc_stderr\": 0.012746237711716634,\n \"acc_norm\": 0.46936114732724904,\n \"acc_norm_stderr\": 0.012746237711716634\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6727941176470589,\n \"acc_stderr\": 0.028501452860396553,\n \"acc_norm\": 0.6727941176470589,\n \"acc_norm_stderr\": 0.028501452860396553\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6437908496732027,\n \"acc_stderr\": 0.0193733324207245,\n \"acc_norm\": 0.6437908496732027,\n \"acc_norm_stderr\": 0.0193733324207245\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7181818181818181,\n \"acc_stderr\": 0.043091187099464585,\n \"acc_norm\": 0.7181818181818181,\n \"acc_norm_stderr\": 0.043091187099464585\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.726530612244898,\n \"acc_stderr\": 0.028535560337128445,\n \"acc_norm\": 0.726530612244898,\n \"acc_norm_stderr\": 0.028535560337128445\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8656716417910447,\n \"acc_stderr\": 0.024112678240900808,\n \"acc_norm\": 0.8656716417910447,\n \"acc_norm_stderr\": 0.024112678240900808\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.88,\n \"acc_stderr\": 0.03265986323710906,\n \"acc_norm\": 0.88,\n \"acc_norm_stderr\": 0.03265986323710906\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5060240963855421,\n \"acc_stderr\": 0.03892212195333045,\n 
\"acc_norm\": 0.5060240963855421,\n \"acc_norm_stderr\": 0.03892212195333045\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5630354957160343,\n \"mc1_stderr\": 0.017363844503195953,\n \"mc2\": 0.7005107732516146,\n \"mc2_stderr\": 0.014999534657573073\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8318863456985004,\n \"acc_stderr\": 0.010510336954166742\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6209249431387415,\n \"acc_stderr\": 0.01336363029508836\n }\n}\n```", "repo_url": "https://huggingface.co/jsfs11/SnorkelWestBeagle-DARETIES-7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|arc:challenge|25_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|gsm8k|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hellaswag|10_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T09-47-01.298299.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T09-47-01.298299.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T09-47-01.298299.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T09-47-01.298299.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T09-47-01.298299.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T09-47-01.298299.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T09-47-01.298299.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": 
"2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": 
"2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T09-47-01.298299.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["**/details_harness|winogrande|5_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T09-47-01.298299.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T09_47_01.298299", "path": ["results_2024-01-25T09-47-01.298299.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T09-47-01.298299.parquet"]}]}]}
2024-01-25T09:49:41+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jsfs11/SnorkelWestBeagle-DARETIES-7B Dataset automatically created during the evaluation run of model jsfs11/SnorkelWestBeagle-DARETIES-7B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T09:47:01.298299 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of jsfs11/SnorkelWestBeagle-DARETIES-7B\n\n\n\nDataset automatically created during the evaluation run of model jsfs11/SnorkelWestBeagle-DARETIES-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T09:47:01.298299(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jsfs11/SnorkelWestBeagle-DARETIES-7B\n\n\n\nDataset automatically created during the evaluation run of model jsfs11/SnorkelWestBeagle-DARETIES-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T09:47:01.298299(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 197, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of jsfs11/SnorkelWestBeagle-DARETIES-7B\n\n\n\nDataset automatically created during the evaluation run of model jsfs11/SnorkelWestBeagle-DARETIES-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T09:47:01.298299(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
[ -0.0663067027926445, 0.22908394038677216, -0.005846529267728329, 0.039281412959098816, 0.08593028783798218, -0.0200586449354887, 0.02594570629298687, 0.11238310486078262, -0.00044030859135091305, 0.1575295627117157, -0.01191883347928524, 0.09804344922304153, 0.08629459887742996, 0.1789223998785019, 0.006989582441747189, -0.120672307908535, 0.023094797506928444, -0.07737639546394348, 0.06765224784612656, 0.07938393950462341, 0.09061405062675476, -0.0921664834022522, 0.061225466430187225, -0.0395631305873394, -0.030333468690514565, 0.009011669084429741, -0.07228284329175949, -0.032387424260377884, 0.08485444635152817, 0.0900285467505455, 0.0239868201315403, -0.026824600994586945, 0.018462704494595528, -0.2416549175977707, 0.019059021025896072, 0.07927309721708298, 0.007478053215891123, 0.059097323566675186, 0.1300664097070694, -0.04961635544896126, 0.05540914461016655, -0.07144948095083237, 0.04852967709302902, 0.04135642573237419, -0.11094647645950317, -0.11483409255743027, -0.14504431188106537, 0.02829202450811863, 0.08510061353445053, 0.056219011545181274, -0.02637525275349617, 0.1177542433142662, -0.024546587839722633, 0.043465521186590195, 0.09002348780632019, -0.09177900105714798, -0.031163541600108147, 0.014214118011295795, 0.0064134057611227036, 0.03015921637415886, -0.10800308734178543, -0.025834305211901665, 0.021596383303403854, 0.049914512783288956, 0.02616698108613491, -0.005011383444070816, -0.0791737511754036, 0.011338232085108757, -0.10497134178876877, -0.10314882546663284, 0.1722591519355774, 0.0077009680680930614, -0.04509466141462326, -0.15428617596626282, -0.03522389009594917, 0.012936747632920742, -0.0015796179650351405, -0.024255972355604172, 0.009169064462184906, -0.016802573576569557, 0.07197064906358719, -0.03110678680241108, -0.10064361989498138, -0.008256574161350727, -0.02452835813164711, 0.04489770159125328, 0.004360470920801163, -0.005955084227025509, -0.0036942658480256796, 0.11432874202728271, -0.02163410745561123, 
-0.10796618461608887, -0.0686790868639946, -0.04089536517858505, -0.11691238731145859, -0.04417913034558296, 0.025064615532755852, -0.07041531056165695, 0.04228072240948677, 0.2233777940273285, -0.053798552602529526, 0.031019527465105057, -0.08880269527435303, -0.0033439777325838804, 0.12492118030786514, 0.06725360453128815, -0.04391823336482048, -0.07974784076213837, -0.002849323907867074, 0.0202944315969944, 0.025925882160663605, -0.018899284303188324, 0.011426056735217571, 0.050201985985040665, 0.04919451102614403, 0.10748545080423355, 0.12866108119487762, 0.008303817361593246, -0.06803905963897705, -0.023131150752305984, 0.20836994051933289, -0.16790033876895905, 0.017986387014389038, 0.012953013181686401, -0.045767948031425476, -0.09355714917182922, 0.05465942993760109, -0.011216824874281883, -0.0725174993276596, 0.10966110229492188, -0.06328020244836807, -0.057441599667072296, -0.08339536935091019, -0.04435483738780022, 0.059654854238033295, -0.01320564467459917, -0.02630419284105301, -0.0681343674659729, -0.13361474871635437, -0.08953869342803955, 0.023209037259221077, -0.07455684244632721, -0.02994927018880844, 0.019886750727891922, -0.02724943682551384, -0.007080593146383762, -0.024628305807709694, 0.13641557097434998, -0.05848107486963272, 0.020557992160320282, 0.0033938102424144745, -0.002016953192651272, 0.07436227798461914, 0.04696765914559364, -0.1242012307047844, 0.07554729282855988, -0.08305028080940247, 0.10960622131824493, -0.09212478995323181, 0.0057458095252513885, -0.12799736857414246, 0.02051246538758278, -0.030547449365258217, 0.015689875930547714, 0.016200508922338486, 0.11077229678630829, -0.2567625045776367, 0.019158724695444107, 0.11706360429525375, -0.10193117707967758, -0.08696195483207703, 0.0637902244925499, -0.0464198961853981, 0.06672408431768417, 0.05583375319838524, 0.09747982770204544, 0.11919229477643967, -0.0620291605591774, -0.12457266449928284, -0.07329488545656204, -0.017185315489768982, 0.11116452515125275, 
0.05438657104969025, -0.0657469779253006, 0.13284693658351898, 0.038873136043548584, -0.006416300777345896, -0.06415235996246338, -0.006495858076959848, -0.057587891817092896, -0.018993444740772247, -0.03961602970957756, -0.08321674913167953, 0.0013189878081902862, -0.07825718075037003, -0.026931041851639748, -0.08701744675636292, 0.013898413628339767, 0.08052483946084976, -0.004053712822496891, 0.011459363624453545, -0.06304475665092468, 0.04719071090221405, 0.0007249278714880347, 0.021839605644345284, -0.2165422886610031, -0.09548098593950272, 0.03549857437610626, -0.10031842440366745, 0.05584248900413513, 0.0020030417945235968, 0.015471763908863068, 0.029939718544483185, -0.0017172247171401978, -0.0015383389545604587, 0.02134893834590912, -0.008733698166906834, -0.01207402441650629, -0.1375282257795334, -0.05044466257095337, -0.08672013133764267, 0.05230216309428215, -0.13861803710460663, -0.019268495962023735, 0.076093889772892, 0.16916032135486603, 0.01434844359755516, -0.08755957335233688, 0.07512054592370987, -0.0076079340651631355, -0.043484970927238464, -0.06803273409605026, -0.009507106617093086, -0.010940814390778542, 0.05686928704380989, 0.025624960660934448, -0.1830098032951355, -0.12501764297485352, 0.06669013947248459, 0.12320299446582794, -0.06900063157081604, -0.056684236973524094, -0.06500458717346191, -0.05614883825182915, -0.09921888262033463, -0.06867236644029617, 0.08666329085826874, 0.08321826159954071, 0.029695644974708557, -0.060382187366485596, -0.07735103368759155, -0.004008407238870859, 0.03621944040060043, -0.07002341747283936, 0.09289630502462387, 0.049930740147829056, -0.09865397959947586, 0.11275722086429596, 0.017381245270371437, 0.11124908179044724, 0.09261798113584518, 0.003873659297823906, -0.10512609779834747, -0.013942983001470566, 0.04995754361152649, 0.02393735945224762, 0.08929671347141266, -0.028640154749155045, 0.03443491458892822, 0.07918431609869003, -0.005783398170024157, 0.05198949947953224, -0.05963432416319847, 
0.04168509319424629, 0.03160792961716652, 0.002482837764546275, 0.03391251340508461, 0.0009823647560551763, 0.013787360861897469, 0.06535124778747559, 0.023830411955714226, 0.10230102390050888, -0.011269625276327133, -0.046185024082660675, -0.09301696717739105, 0.1438373476266861, -0.10174603015184402, -0.24863164126873016, -0.15986016392707825, -0.03492363542318344, -0.029511230066418648, -0.007331884931772947, 0.04765889421105385, -0.004163544159382582, -0.10424242913722992, -0.1006309911608696, 0.04537409916520119, 0.035399094223976135, -0.12308672815561295, -0.034590911120176315, 0.040750935673713684, 0.015842769294977188, -0.15431074798107147, 0.02978665567934513, 0.05002264678478241, -0.022704800590872765, -0.003872755216434598, 0.11021507531404495, 0.12310583144426346, 0.07283034920692444, 0.036787912249565125, -0.020731883123517036, 0.007260153070092201, 0.2037365883588791, -0.09946683049201965, 0.05457230657339096, 0.1236366257071495, -0.04924305900931358, 0.06789078563451767, 0.17424236238002777, 0.008427908644080162, -0.08288587629795074, 0.03312554955482483, 0.08133325725793839, -0.05755199119448662, -0.2606801986694336, -0.07316915690898895, -0.03417036682367325, -0.01166502945125103, 0.08879547566175461, 0.06674697250127792, -0.002614059019833803, 0.02781921625137329, -0.10315251350402832, -0.025643905624747276, -0.04551643878221512, 0.06150270253419876, 0.0732199102640152, 0.011946823447942734, 0.044532399624586105, -0.04057324677705765, 0.04932019114494324, 0.113533154129982, 0.04315619170665741, 0.15678682923316956, -0.04474981501698494, 0.152278870344162, 0.07807208597660065, 0.07616975158452988, -0.046814724802970886, 0.06956319510936737, 0.0012509969528764486, 0.062291618436574936, -0.0017587963957339525, -0.09711460769176483, -0.06364217400550842, 0.0843658298254013, 0.008006841875612736, -0.032776907086372375, 0.04262155294418335, -0.02316141314804554, 0.04549350589513779, 0.1421041041612625, 0.0064403400756418705, -0.14228899776935577, 
-0.07029988616704941, 0.06334807723760605, -0.03747062385082245, -0.11133217811584473, -0.022228188812732697, 0.08942730724811554, -0.13735857605934143, 0.009566514752805233, -0.024985171854496002, 0.09747561067342758, -0.12116929143667221, -0.024840977042913437, -0.026594076305627823, 0.09207569062709808, -0.008635655976831913, 0.1100093424320221, -0.13995201885700226, 0.09879972040653229, 0.0027055370155721903, 0.0564679279923439, -0.08218443393707275, 0.07896524667739868, -0.01905573159456253, -0.057828016579151154, 0.13834244012832642, -0.005739565938711166, -0.13855518400669098, -0.037048693746328354, -0.1238417699933052, -0.0035731440875679255, 0.041343484073877335, -0.10128196328878403, 0.10248050838708878, 0.024336332455277443, -0.015632443130016327, -0.032656554132699966, -0.01406282838433981, -0.12770012021064758, -0.2241484671831131, 0.09479166567325592, -0.09898070991039276, 0.05924462154507637, -0.04433418810367584, -0.035428375005722046, -0.022617170587182045, 0.1696770042181015, -0.08296138793230057, -0.07507576048374176, -0.13362747430801392, 0.032665327191352844, 0.18123209476470947, -0.06748217344284058, 0.04814587160944939, -0.04714391380548477, 0.17707838118076324, -0.0076210759580135345, -0.05699148401618004, -0.002430209657177329, -0.0906895250082016, -0.1395552158355713, -0.04416600614786148, 0.13942234218120575, 0.053715094923973083, 0.012258142232894897, 0.011294351890683174, 0.039958834648132324, 0.0038276389241218567, -0.08635234087705612, 0.0477835014462471, 0.07933375984430313, 0.11772296577692032, 0.03651187941431999, -0.03330269083380699, -0.15247376263141632, -0.10258330404758453, -0.0810321494936943, 0.0643182024359703, 0.143101304769516, -0.0546412467956543, 0.13569653034210205, 0.1248011365532875, -0.10146909207105637, -0.1867385357618332, -0.04458558186888695, 0.02389857918024063, -0.006529204081743956, 0.09862980246543884, -0.19684453308582306, 0.07214636355638504, 0.062092408537864685, -0.011383735574781895, 0.1217912882566452, 
-0.2106260508298874, -0.13086043298244476, 0.045643337070941925, 0.02501433528959751, -0.2064548283815384, -0.1491335779428482, -0.11747193336486816, -0.015714265406131744, -0.17634820938110352, 0.14125989377498627, 0.003767061745747924, 0.017328454181551933, -0.003835168667137623, 0.08249121904373169, 0.05157420039176941, -0.06232611835002899, 0.12478134781122208, 0.01645101048052311, 0.033746808767318726, -0.09111997485160828, -0.04363788664340973, 0.01108291931450367, -0.05640336498618126, 0.09449061751365662, 0.035483140498399734, 0.051223769783973694, -0.10699493438005447, -0.026663888245821, -0.05597789213061333, 0.056535594165325165, -0.0719073936343193, -0.058027058839797974, -0.04491746425628662, 0.08325573801994324, 0.07609828561544418, -0.02179361879825592, 0.0457083135843277, -0.03247487172484398, 0.03075587935745716, 0.24522273242473602, 0.09333035349845886, 0.029443155974149704, -0.09595102816820145, -0.022746684029698372, -0.010589954443275928, -0.0031631411984562874, -0.14181490242481232, 0.04963120445609093, 0.09232889115810394, 0.03756081685423851, 0.08572886139154434, -0.02782866545021534, -0.1786661446094513, 0.000991141889244318, 0.07986084371805191, -0.11224442720413208, -0.18648207187652588, 0.021099630743265152, 0.11347803473472595, -0.13936381042003632, -0.05504240840673447, 0.09586122632026672, 0.027016350999474525, -0.026602881029248238, 0.00793648324906826, 0.07324596494436264, 0.04913484305143356, 0.11878415942192078, 0.009769667871296406, 0.0523824542760849, -0.07748455554246902, 0.1099059209227562, 0.1343412846326828, -0.09500227868556976, 0.01937675289809704, 0.07663070410490036, -0.05398580804467201, -0.05636615678668022, 0.012176400981843472, 0.030331730842590332, 0.02000562474131584, -0.04158009961247444, 0.023824144154787064, -0.033049117773771286, 0.05887974053621292, 0.11245111376047134, -0.00429412629455328, 0.03951239958405495, 0.01677345484495163, -0.014084023423492908, -0.07105797529220581, 0.12001971155405045, 
0.06869877129793167, 0.03890686109662056, -0.04618236795067787, 0.03126281499862671, -0.0028737636748701334, -0.011025656946003437, 0.01706659607589245, -0.03651000186800957, -0.04115365073084831, -0.010909073054790497, -0.16190621256828308, 0.03187659755349159, -0.09655113518238068, -0.010494220070540905, -0.007453406695276499, -0.024107061326503754, -0.020046627148985863, 0.023566126823425293, -0.05599316954612732, -0.06081245839595795, -0.0486665703356266, 0.10253241658210754, -0.19894318282604218, -0.006376380100846291, 0.08111391961574554, -0.07539964467287064, 0.07895810902118683, 0.02458956465125084, -0.01126575656235218, 0.008397876285016537, -0.09238607436418533, -0.03153382986783981, -0.025724589824676514, 0.04854993149638176, 0.028879044577479362, -0.1556035280227661, -0.009777870029211044, 0.015083830803632736, -0.06986535340547562, -0.024276355281472206, 0.04523599520325661, -0.14408992230892181, 0.03618469834327698, 0.0638192743062973, -0.04341092333197594, -0.04479746147990227, 0.05819397792220116, 0.07124781608581543, 0.006930823437869549, 0.11463097482919693, -0.005555443000048399, 0.04805640131235123, -0.15801778435707092, -0.03544812276959419, 0.0026360624469816685, -0.0021275272592902184, 0.0016149211442098022, 0.014057163149118423, 0.04575742781162262, -0.01210673339664936, 0.17787037789821625, -0.01617712341248989, 0.06450378149747849, 0.03195313364267349, -0.002223724965006113, -0.05221942812204361, 0.030827723443508148, 0.04369714483618736, -0.009755718521773815, 0.010226989164948463, 0.025114787742495537, -0.03760836273431778, -0.036455877125263214, -0.03795574605464935, 0.089313805103302, 0.14654290676116943, 0.15591195225715637, -0.011631479486823082, 0.06363366544246674, -0.15262307226657867, -0.06135949119925499, 0.029690423980355263, -0.048699211329221725, 0.033205535262823105, -0.07651209831237793, 0.042890727519989014, 0.07482105493545532, -0.13562293350696564, 0.133416548371315, -0.057191360741853714, -0.04494046792387962, 
-0.028895003721117973, -0.17100991308689117, -0.04251086711883545, 0.02497602440416813, 0.008821044117212296, -0.10779997706413269, 0.1069505363702774, 0.10804335027933121, -0.011778063140809536, -0.023221474140882492, 0.1033966988325119, -0.06343346834182739, -0.07063934206962585, -0.013906198553740978, 0.024947039783000946, 0.02284364029765129, -0.005584582686424255, 0.08781521022319794, 0.007121468428522348, 0.07401826232671738, 0.07373465597629547, 0.09141561388969421, 0.07869457453489304, 0.018036801367998123, -0.03475579619407654, -0.06145617365837097, -0.003368756268173456, -0.014221800491213799, -0.05075712874531746, 0.17797048389911652, 0.04840641841292381, 0.02027660608291626, 0.012507454492151737, 0.2120009809732437, -0.012848634272813797, -0.0767565369606018, -0.13887321949005127, 0.06922651082277298, 0.0022453118581324816, 0.019512930884957314, 0.03593415766954422, -0.1414814591407776, 0.03547097370028496, 0.15501445531845093, 0.09814204275608063, 0.01886723004281521, 0.006549412850290537, 0.032660674303770065, 0.025608044117689133, -0.04738907888531685, 0.03963586315512657, 0.03022201731801033, 0.17584002017974854, -0.04461919888854027, 0.05491682142019272, -0.014801031909883022, -0.030174221843481064, -0.025686170905828476, 0.0959000438451767, -0.058079805225133896, 0.01702917367219925, -0.05676060542464256, 0.08841916918754578, -0.04978875070810318, -0.2845412492752075, -0.017303815111517906, -0.09015771746635437, -0.13205847144126892, -0.023320859298110008, 0.04777594655752182, -0.014638612046837807, 0.03254219889640808, 0.03920336440205574, -0.012655265629291534, 0.20226307213306427, 0.011207767762243748, -0.06806331872940063, -0.048185136169195175, 0.07009085267782211, -0.053576841950416565, 0.24644950032234192, 0.003162349108606577, 0.0483550988137722, 0.09710098057985306, -0.020165633410215378, -0.1733042448759079, 0.020741883665323257, 0.10270597040653229, -0.02843618020415306, 0.05503571778535843, 0.1626404970884323, -0.0150645412504673, 
0.11882168799638748, 0.051825813949108124, -0.009578060358762741, 0.03803854435682297, 0.05845658853650093, 0.027417486533522606, -0.09014835208654404, 0.06616925448179245, -0.08127399533987045, 0.13505291938781738, 0.10830693691968918, -0.028049053624272346, -0.003909140359610319, -0.06523679941892624, 0.06950706243515015, -0.022121448069810867, 0.13202489912509918, -0.012997985817492008, -0.1532890349626541, 0.04821034148335457, 0.04743177443742752, 0.06682192534208298, -0.2044529914855957, -0.06276155263185501, 0.09872957319021225, -0.05232984572649002, -0.014216731302440166, 0.11461467295885086, 0.039615362882614136, 0.02241644449532032, -0.05637146532535553, -0.0753631442785263, -0.00043584260856732726, 0.11173567920923233, -0.06285550445318222, -0.026692165061831474 ]
d8f2819969dd2b5dc207066acd33d5dea1213be8
# Dataset Card for Evaluation run of cognitivecomputations/TinyDolphin-2.8.1-1.1b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [cognitivecomputations/TinyDolphin-2.8.1-1.1b](https://huggingface.co/cognitivecomputations/TinyDolphin-2.8.1-1.1b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_cognitivecomputations__TinyDolphin-2.8.1-1.1b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T09:58:55.785589](https://huggingface.co/datasets/open-llm-leaderboard/details_cognitivecomputations__TinyDolphin-2.8.1-1.1b/blob/main/results_2024-01-25T09-58-55.785589.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.25954316457863047, "acc_stderr": 0.030732800295881258, "acc_norm": 0.2606565852109988, "acc_norm_stderr": 0.03150616236747666, "mc1": 0.20807833537331702, "mc1_stderr": 0.01421050347357662, "mc2": 0.3550954012841355, "mc2_stderr": 0.014299607014922852 }, "harness|arc:challenge|25": { "acc": 0.3250853242320819, "acc_stderr": 0.013688147309729117, "acc_norm": 0.34982935153583616, "acc_norm_stderr": 0.013936809212158282 }, "harness|hellaswag|10": { "acc": 0.4654451304521012, "acc_stderr": 0.004977851161904398, "acc_norm": 0.6010754829715196, "acc_norm_stderr": 0.004886764243204046 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2740740740740741, "acc_stderr": 0.03853254836552003, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.03853254836552003 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.20394736842105263, "acc_stderr": 0.032790004063100495, "acc_norm": 0.20394736842105263, "acc_norm_stderr": 0.032790004063100495 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.25660377358490566, "acc_stderr": 0.026880647889051965, "acc_norm": 0.25660377358490566, "acc_norm_stderr": 0.026880647889051965 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.19444444444444445, "acc_stderr": 0.03309615177059008, "acc_norm": 0.19444444444444445, "acc_norm_stderr": 0.03309615177059008 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.16, "acc_stderr": 0.03684529491774709, "acc_norm": 0.16, "acc_norm_stderr": 0.03684529491774709 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, 
"acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2254335260115607, "acc_stderr": 0.03186209851641142, "acc_norm": 0.2254335260115607, "acc_norm_stderr": 0.03186209851641142 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.17647058823529413, "acc_stderr": 0.03793281185307811, "acc_norm": 0.17647058823529413, "acc_norm_stderr": 0.03793281185307811 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.27, "acc_stderr": 0.044619604333847415, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847415 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2765957446808511, "acc_stderr": 0.029241883869628817, "acc_norm": 0.2765957446808511, "acc_norm_stderr": 0.029241883869628817 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2894736842105263, "acc_stderr": 0.04266339443159394, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.04266339443159394 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2620689655172414, "acc_stderr": 0.036646663372252565, "acc_norm": 0.2620689655172414, "acc_norm_stderr": 0.036646663372252565 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.02256989707491841, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.02256989707491841 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.1746031746031746, "acc_stderr": 0.0339549002085611, "acc_norm": 0.1746031746031746, "acc_norm_stderr": 0.0339549002085611 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.24838709677419354, "acc_stderr": 0.024580028921481006, "acc_norm": 0.24838709677419354, "acc_norm_stderr": 0.024580028921481006 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.270935960591133, "acc_stderr": 0.031270907132976984, "acc_norm": 0.270935960591133, "acc_norm_stderr": 0.031270907132976984 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.2474747474747475, "acc_stderr": 0.030746300742124488, "acc_norm": 0.2474747474747475, "acc_norm_stderr": 0.030746300742124488 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.22797927461139897, "acc_stderr": 0.030276909945178256, "acc_norm": 0.22797927461139897, "acc_norm_stderr": 0.030276909945178256 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2153846153846154, "acc_stderr": 0.020843034557462878, "acc_norm": 0.2153846153846154, "acc_norm_stderr": 0.020843034557462878 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.24814814814814815, "acc_stderr": 0.026335739404055803, "acc_norm": 0.24814814814814815, "acc_norm_stderr": 0.026335739404055803 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21428571428571427, "acc_stderr": 0.02665353159671548, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.02665353159671548 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2251655629139073, "acc_stderr": 0.03410435282008936, "acc_norm": 0.2251655629139073, "acc_norm_stderr": 0.03410435282008936 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.24770642201834864, "acc_stderr": 0.018508143602547822, "acc_norm": 0.24770642201834864, "acc_norm_stderr": 0.018508143602547822 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4305555555555556, "acc_stderr": 
0.03376922151252335, "acc_norm": 0.4305555555555556, "acc_norm_stderr": 0.03376922151252335 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.28921568627450983, "acc_stderr": 0.03182231867647553, "acc_norm": 0.28921568627450983, "acc_norm_stderr": 0.03182231867647553 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.27848101265822783, "acc_stderr": 0.029178682304842548, "acc_norm": 0.27848101265822783, "acc_norm_stderr": 0.029178682304842548 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.34977578475336324, "acc_stderr": 0.03200736719484503, "acc_norm": 0.34977578475336324, "acc_norm_stderr": 0.03200736719484503 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.21374045801526717, "acc_stderr": 0.0359546161177469, "acc_norm": 0.21374045801526717, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.3305785123966942, "acc_stderr": 0.04294340845212094, "acc_norm": 0.3305785123966942, "acc_norm_stderr": 0.04294340845212094 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25, "acc_stderr": 0.04186091791394607, "acc_norm": 0.25, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.3128834355828221, "acc_stderr": 0.03642914578292404, "acc_norm": 0.3128834355828221, "acc_norm_stderr": 0.03642914578292404 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.04203277291467764, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467764 }, "harness|hendrycksTest-management|5": { "acc": 0.2524271844660194, "acc_stderr": 0.04301250399690877, "acc_norm": 0.2524271844660194, "acc_norm_stderr": 0.04301250399690877 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2564102564102564, "acc_stderr": 0.028605953702004253, "acc_norm": 0.2564102564102564, "acc_norm_stderr": 0.028605953702004253 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.21, "acc_stderr": 0.04093601807403326, "acc_norm": 
0.21, "acc_norm_stderr": 0.04093601807403326 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.27586206896551724, "acc_stderr": 0.015982814774695632, "acc_norm": 0.27586206896551724, "acc_norm_stderr": 0.015982814774695632 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.29190751445086704, "acc_stderr": 0.024476994076247337, "acc_norm": 0.29190751445086704, "acc_norm_stderr": 0.024476994076247337 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24692737430167597, "acc_stderr": 0.014422292204808835, "acc_norm": 0.24692737430167597, "acc_norm_stderr": 0.014422292204808835 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.23529411764705882, "acc_stderr": 0.024288619466046102, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.024288619466046102 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.31511254019292606, "acc_stderr": 0.026385273703464492, "acc_norm": 0.31511254019292606, "acc_norm_stderr": 0.026385273703464492 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.29012345679012347, "acc_stderr": 0.02525117393649502, "acc_norm": 0.29012345679012347, "acc_norm_stderr": 0.02525117393649502 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2907801418439716, "acc_stderr": 0.027090664368353178, "acc_norm": 0.2907801418439716, "acc_norm_stderr": 0.027090664368353178 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.27183833116036504, "acc_stderr": 0.011363135278651411, "acc_norm": 0.27183833116036504, "acc_norm_stderr": 0.011363135278651411 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.17279411764705882, "acc_stderr": 0.022966067585581756, "acc_norm": 0.17279411764705882, "acc_norm_stderr": 0.022966067585581756 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.2647058823529412, "acc_stderr": 0.017848089574913226, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.017848089574913226 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.2636363636363636, "acc_stderr": 
0.04220224692971987, "acc_norm": 0.2636363636363636, "acc_norm_stderr": 0.04220224692971987 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.17551020408163265, "acc_stderr": 0.024352800722970015, "acc_norm": 0.17551020408163265, "acc_norm_stderr": 0.024352800722970015 }, "harness|hendrycksTest-sociology|5": { "acc": 0.23383084577114427, "acc_stderr": 0.029929415408348377, "acc_norm": 0.23383084577114427, "acc_norm_stderr": 0.029929415408348377 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-virology|5": { "acc": 0.22289156626506024, "acc_stderr": 0.03240004825594687, "acc_norm": 0.22289156626506024, "acc_norm_stderr": 0.03240004825594687 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.22807017543859648, "acc_stderr": 0.03218093795602357, "acc_norm": 0.22807017543859648, "acc_norm_stderr": 0.03218093795602357 }, "harness|truthfulqa:mc|0": { "mc1": 0.20807833537331702, "mc1_stderr": 0.01421050347357662, "mc2": 0.3550954012841355, "mc2_stderr": 0.014299607014922852 }, "harness|winogrande|5": { "acc": 0.6069455406471981, "acc_stderr": 0.013727276249108454 }, "harness|gsm8k|5": { "acc": 0.006823351023502654, "acc_stderr": 0.0022675371022544987 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_cognitivecomputations__TinyDolphin-2.8.1-1.1b
[ "region:us" ]
2024-01-25T10:01:20+00:00
{"pretty_name": "Evaluation run of cognitivecomputations/TinyDolphin-2.8.1-1.1b", "dataset_summary": "Dataset automatically created during the evaluation run of model [cognitivecomputations/TinyDolphin-2.8.1-1.1b](https://huggingface.co/cognitivecomputations/TinyDolphin-2.8.1-1.1b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_cognitivecomputations__TinyDolphin-2.8.1-1.1b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T09:58:55.785589](https://huggingface.co/datasets/open-llm-leaderboard/details_cognitivecomputations__TinyDolphin-2.8.1-1.1b/blob/main/results_2024-01-25T09-58-55.785589.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.25954316457863047,\n \"acc_stderr\": 0.030732800295881258,\n \"acc_norm\": 0.2606565852109988,\n \"acc_norm_stderr\": 0.03150616236747666,\n \"mc1\": 0.20807833537331702,\n \"mc1_stderr\": 0.01421050347357662,\n \"mc2\": 0.3550954012841355,\n \"mc2_stderr\": 0.014299607014922852\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.3250853242320819,\n \"acc_stderr\": 0.013688147309729117,\n \"acc_norm\": 0.34982935153583616,\n \"acc_norm_stderr\": 0.013936809212158282\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4654451304521012,\n \"acc_stderr\": 0.004977851161904398,\n \"acc_norm\": 0.6010754829715196,\n \"acc_norm_stderr\": 0.004886764243204046\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2740740740740741,\n \"acc_stderr\": 0.03853254836552003,\n \"acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.03853254836552003\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.20394736842105263,\n \"acc_stderr\": 0.032790004063100495,\n \"acc_norm\": 0.20394736842105263,\n \"acc_norm_stderr\": 0.032790004063100495\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.25660377358490566,\n \"acc_stderr\": 0.026880647889051965,\n \"acc_norm\": 0.25660377358490566,\n \"acc_norm_stderr\": 0.026880647889051965\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.19444444444444445,\n \"acc_stderr\": 0.03309615177059008,\n \"acc_norm\": 0.19444444444444445,\n \"acc_norm_stderr\": 0.03309615177059008\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 
0.16,\n \"acc_stderr\": 0.03684529491774709,\n \"acc_norm\": 0.16,\n \"acc_norm_stderr\": 0.03684529491774709\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2254335260115607,\n \"acc_stderr\": 0.03186209851641142,\n \"acc_norm\": 0.2254335260115607,\n \"acc_norm_stderr\": 0.03186209851641142\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.17647058823529413,\n \"acc_stderr\": 0.03793281185307811,\n \"acc_norm\": 0.17647058823529413,\n \"acc_norm_stderr\": 0.03793281185307811\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847415,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847415\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2765957446808511,\n \"acc_stderr\": 0.029241883869628817,\n \"acc_norm\": 0.2765957446808511,\n \"acc_norm_stderr\": 0.029241883869628817\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2894736842105263,\n \"acc_stderr\": 0.04266339443159394,\n \"acc_norm\": 0.2894736842105263,\n \"acc_norm_stderr\": 0.04266339443159394\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2620689655172414,\n \"acc_stderr\": 0.036646663372252565,\n \"acc_norm\": 0.2620689655172414,\n \"acc_norm_stderr\": 0.036646663372252565\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.02256989707491841,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.02256989707491841\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.1746031746031746,\n \"acc_stderr\": 
0.0339549002085611,\n \"acc_norm\": 0.1746031746031746,\n \"acc_norm_stderr\": 0.0339549002085611\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.24838709677419354,\n \"acc_stderr\": 0.024580028921481006,\n \"acc_norm\": 0.24838709677419354,\n \"acc_norm_stderr\": 0.024580028921481006\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.270935960591133,\n \"acc_stderr\": 0.031270907132976984,\n \"acc_norm\": 0.270935960591133,\n \"acc_norm_stderr\": 0.031270907132976984\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.2474747474747475,\n \"acc_stderr\": 0.030746300742124488,\n \"acc_norm\": 0.2474747474747475,\n \"acc_norm_stderr\": 0.030746300742124488\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.22797927461139897,\n \"acc_stderr\": 0.030276909945178256,\n \"acc_norm\": 0.22797927461139897,\n \"acc_norm_stderr\": 0.030276909945178256\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.2153846153846154,\n \"acc_stderr\": 0.020843034557462878,\n \"acc_norm\": 0.2153846153846154,\n \"acc_norm_stderr\": 0.020843034557462878\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.24814814814814815,\n \"acc_stderr\": 0.026335739404055803,\n \"acc_norm\": 0.24814814814814815,\n \"acc_norm_stderr\": 0.026335739404055803\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.21428571428571427,\n \"acc_stderr\": 0.02665353159671548,\n \"acc_norm\": 0.21428571428571427,\n \"acc_norm_stderr\": 0.02665353159671548\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2251655629139073,\n \"acc_stderr\": 0.03410435282008936,\n \"acc_norm\": 0.2251655629139073,\n \"acc_norm_stderr\": 0.03410435282008936\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.24770642201834864,\n \"acc_stderr\": 0.018508143602547822,\n \"acc_norm\": 0.24770642201834864,\n \"acc_norm_stderr\": 0.018508143602547822\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4305555555555556,\n \"acc_stderr\": 0.03376922151252335,\n \"acc_norm\": 0.4305555555555556,\n \"acc_norm_stderr\": 0.03376922151252335\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.28921568627450983,\n \"acc_stderr\": 0.03182231867647553,\n \"acc_norm\": 0.28921568627450983,\n \"acc_norm_stderr\": 0.03182231867647553\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.27848101265822783,\n \"acc_stderr\": 0.029178682304842548,\n \"acc_norm\": 0.27848101265822783,\n \"acc_norm_stderr\": 0.029178682304842548\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.34977578475336324,\n \"acc_stderr\": 0.03200736719484503,\n \"acc_norm\": 0.34977578475336324,\n \"acc_norm_stderr\": 0.03200736719484503\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.21374045801526717,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.21374045801526717,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.3305785123966942,\n \"acc_stderr\": 0.04294340845212094,\n \"acc_norm\": 0.3305785123966942,\n \"acc_norm_stderr\": 0.04294340845212094\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04186091791394607,\n 
\"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.3128834355828221,\n \"acc_stderr\": 0.03642914578292404,\n \"acc_norm\": 0.3128834355828221,\n \"acc_norm_stderr\": 0.03642914578292404\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.26785714285714285,\n \"acc_stderr\": 0.04203277291467764,\n \"acc_norm\": 0.26785714285714285,\n \"acc_norm_stderr\": 0.04203277291467764\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.2524271844660194,\n \"acc_stderr\": 0.04301250399690877,\n \"acc_norm\": 0.2524271844660194,\n \"acc_norm_stderr\": 0.04301250399690877\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2564102564102564,\n \"acc_stderr\": 0.028605953702004253,\n \"acc_norm\": 0.2564102564102564,\n \"acc_norm_stderr\": 0.028605953702004253\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.04093601807403326,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.04093601807403326\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.27586206896551724,\n \"acc_stderr\": 0.015982814774695632,\n \"acc_norm\": 0.27586206896551724,\n \"acc_norm_stderr\": 0.015982814774695632\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.29190751445086704,\n \"acc_stderr\": 0.024476994076247337,\n \"acc_norm\": 0.29190751445086704,\n \"acc_norm_stderr\": 0.024476994076247337\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24692737430167597,\n \"acc_stderr\": 0.014422292204808835,\n \"acc_norm\": 0.24692737430167597,\n \"acc_norm_stderr\": 0.014422292204808835\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.024288619466046102,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.024288619466046102\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.31511254019292606,\n \"acc_stderr\": 0.026385273703464492,\n 
\"acc_norm\": 0.31511254019292606,\n \"acc_norm_stderr\": 0.026385273703464492\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.29012345679012347,\n \"acc_stderr\": 0.02525117393649502,\n \"acc_norm\": 0.29012345679012347,\n \"acc_norm_stderr\": 0.02525117393649502\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2907801418439716,\n \"acc_stderr\": 0.027090664368353178,\n \"acc_norm\": 0.2907801418439716,\n \"acc_norm_stderr\": 0.027090664368353178\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.27183833116036504,\n \"acc_stderr\": 0.011363135278651411,\n \"acc_norm\": 0.27183833116036504,\n \"acc_norm_stderr\": 0.011363135278651411\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.17279411764705882,\n \"acc_stderr\": 0.022966067585581756,\n \"acc_norm\": 0.17279411764705882,\n \"acc_norm_stderr\": 0.022966067585581756\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.2647058823529412,\n \"acc_stderr\": 0.017848089574913226,\n \"acc_norm\": 0.2647058823529412,\n \"acc_norm_stderr\": 0.017848089574913226\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.2636363636363636,\n \"acc_stderr\": 0.04220224692971987,\n \"acc_norm\": 0.2636363636363636,\n \"acc_norm_stderr\": 0.04220224692971987\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.17551020408163265,\n \"acc_stderr\": 0.024352800722970015,\n \"acc_norm\": 0.17551020408163265,\n \"acc_norm_stderr\": 0.024352800722970015\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.23383084577114427,\n \"acc_stderr\": 0.029929415408348377,\n \"acc_norm\": 0.23383084577114427,\n \"acc_norm_stderr\": 0.029929415408348377\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.22289156626506024,\n 
\"acc_stderr\": 0.03240004825594687,\n \"acc_norm\": 0.22289156626506024,\n \"acc_norm_stderr\": 0.03240004825594687\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.22807017543859648,\n \"acc_stderr\": 0.03218093795602357,\n \"acc_norm\": 0.22807017543859648,\n \"acc_norm_stderr\": 0.03218093795602357\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.20807833537331702,\n \"mc1_stderr\": 0.01421050347357662,\n \"mc2\": 0.3550954012841355,\n \"mc2_stderr\": 0.014299607014922852\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6069455406471981,\n \"acc_stderr\": 0.013727276249108454\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.006823351023502654,\n \"acc_stderr\": 0.0022675371022544987\n }\n}\n```", "repo_url": "https://huggingface.co/cognitivecomputations/TinyDolphin-2.8.1-1.1b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|arc:challenge|25_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|gsm8k|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hellaswag|10_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T09-58-55.785589.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T09-58-55.785589.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T09-58-55.785589.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T09-58-55.785589.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T09-58-55.785589.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T09-58-55.785589.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T09-58-55.785589.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": 
"2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": 
"2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T09-58-55.785589.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["**/details_harness|winogrande|5_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T09-58-55.785589.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T09_58_55.785589", "path": ["results_2024-01-25T09-58-55.785589.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T09-58-55.785589.parquet"]}]}]}
2024-01-25T10:01:47+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of cognitivecomputations/TinyDolphin-2.8.1-1.1b Dataset automatically created during the evaluation run of model cognitivecomputations/TinyDolphin-2.8.1-1.1b on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T09:58:55.785589(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of cognitivecomputations/TinyDolphin-2.8.1-1.1b\n\n\n\nDataset automatically created during the evaluation run of model cognitivecomputations/TinyDolphin-2.8.1-1.1b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T09:58:55.785589(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of cognitivecomputations/TinyDolphin-2.8.1-1.1b\n\n\n\nDataset automatically created during the evaluation run of model cognitivecomputations/TinyDolphin-2.8.1-1.1b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T09:58:55.785589(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 193, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of cognitivecomputations/TinyDolphin-2.8.1-1.1b\n\n\n\nDataset automatically created during the evaluation run of model cognitivecomputations/TinyDolphin-2.8.1-1.1b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T09:58:55.785589(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
[ -0.04904435947537422, 0.21517649292945862, -0.004098823294043541, 0.0380692295730114, 0.0895071029663086, -0.006460865959525108, 0.008523385040462017, 0.1218777522444725, 0.011421498842537403, 0.18389098346233368, -0.011963700875639915, 0.09534063190221786, 0.08780238777399063, 0.11145415902137756, 0.020438367500901222, -0.13577695190906525, 0.0172352846711874, -0.07067172229290009, 0.1064353808760643, 0.07990160584449768, 0.08016278594732285, -0.08258119225502014, 0.058975398540496826, -0.05316542461514473, -0.006126459687948227, -0.008104613982141018, -0.09267819672822952, -0.031347401440143585, 0.0908016636967659, 0.08576387166976929, 0.032838668674230576, -0.013675816357135773, 0.017070923000574112, -0.25955304503440857, 0.013256454840302467, 0.0865456834435463, -0.0010865145595744252, 0.04133794456720352, 0.11814779788255692, -0.07817099988460541, 0.033208563923835754, -0.07675469666719437, 0.06476691365242004, 0.04722356051206589, -0.12030268460512161, -0.12029057741165161, -0.1385682225227356, 0.010989001020789146, 0.07268718630075455, 0.0390598438680172, -0.02818591147661209, 0.15942031145095825, -0.031358424574136734, 0.049162957817316055, 0.13271130621433258, -0.10120196640491486, -0.018345875665545464, 0.060179777443408966, 0.02913137525320053, 0.07595380395650864, -0.08631222695112228, -0.009597484022378922, 0.03886367753148079, 0.04375499486923218, 0.005910961888730526, 0.00854682270437479, -0.041795939207077026, 0.012868608348071575, -0.14313751459121704, -0.11996330320835114, 0.18612979352474213, 0.01563594862818718, -0.03867538645863533, -0.1742149442434311, -0.030070515349507332, 0.0033046568278223276, -0.0037840160075575113, -0.045364685356616974, 0.009707820601761341, -0.023234650492668152, 0.08452094346284866, -0.019044265151023865, -0.09706756472587585, -0.009309476241469383, -0.0009600453777238727, 0.03862090781331062, 0.01941847987473011, -0.019380949437618256, 0.007493930868804455, 0.1081257089972496, -0.03524725139141083, 
-0.0814899429678917, -0.07487831264734268, -0.04844840243458748, -0.09959942102432251, -0.05411747097969055, 0.01180258858948946, -0.0780295804142952, 0.024028891697525978, 0.2156640887260437, -0.01419577281922102, 0.038389112800359726, -0.11014430224895477, 0.010322347283363342, 0.12992195785045624, 0.061843231320381165, -0.07344374805688858, -0.06450280547142029, -0.02139243483543396, 0.029645627364516258, 0.03649405017495155, -0.02296293154358864, 0.0058793663047254086, 0.08098597824573517, 0.06381078064441681, 0.11850789189338684, 0.12315981090068817, 0.0257648266851902, -0.07169481366872787, -0.02758820354938507, 0.22225898504257202, -0.14156357944011688, -0.011908299289643764, 0.005792965646833181, -0.03456870838999748, -0.10448881983757019, 0.08292827755212784, 0.003031824016943574, -0.053904224187135696, 0.106441430747509, -0.05131936073303223, -0.07301273941993713, -0.07950497418642044, -0.05150269716978073, 0.06682699918746948, -0.01857154257595539, -0.030689584091305733, -0.07858645170927048, -0.0969918966293335, -0.07346328347921371, 0.02266717329621315, -0.07616136223077774, -0.019595980644226074, 0.03133668377995491, -0.025092005729675293, -0.017168371006846428, -0.018941214308142662, 0.10036568343639374, -0.05063488706946373, 0.03319796174764633, 0.014883854426443577, 0.01154542900621891, 0.07732443511486053, 0.0434754341840744, -0.12425035983324051, 0.08237717300653458, -0.12109548598527908, 0.09718085080385208, -0.1016392856836319, -0.017466478049755096, -0.13627178966999054, -0.012413284741342068, -0.02830396592617035, 0.01477291900664568, 0.0018532603280618787, 0.10023011267185211, -0.24350464344024658, 0.025141149759292603, 0.12645062804222107, -0.09610328823328018, -0.11612742394208908, 0.08103420585393906, -0.0474703386425972, 0.08598899096250534, 0.05362589284777641, 0.12187884002923965, 0.09875175356864929, -0.080047607421875, -0.1211761087179184, -0.07855040580034256, -0.023512529209256172, 0.15315686166286469, 0.058337513357400894, 
-0.06617242097854614, 0.11861231178045273, 0.04169116169214249, -0.018568888306617737, -0.10091888159513474, -0.01908901333808899, -0.06593870371580124, -0.026591308414936066, -0.051138702780008316, -0.07613153010606766, 0.008073323406279087, -0.08965032547712326, -0.027344053611159325, -0.08988888561725616, 0.02520688623189926, 0.08119324594736099, -0.02641610987484455, 0.016343979164958, -0.06865689158439636, 0.04203594848513603, -0.0032986041624099016, 0.02404698170721531, -0.2159624993801117, -0.10033591091632843, 0.04529601335525513, -0.1390540897846222, 0.05365719273686409, 0.03507630527019501, 0.013330060057342052, 0.036959752440452576, -0.016649138182401657, 0.020717792212963104, 0.01878438889980316, 0.003219706006348133, -0.023697905242443085, -0.1423366814851761, -0.05204852297902107, -0.07811319082975388, 0.08878330886363983, -0.11993903666734695, -0.017599502578377724, 0.0819229707121849, 0.16406916081905365, -0.0004724399477709085, -0.0842086672782898, 0.0719040110707283, -0.00029207245097495615, -0.04069896042346954, -0.059836409986019135, 0.01174307893961668, -0.026728026568889618, 0.0402119979262352, 0.018236255273222923, -0.20944854617118835, -0.16058295965194702, 0.08104993402957916, 0.12439104169607162, -0.0683865025639534, -0.06686544418334961, -0.06386725604534149, -0.06420140713453293, -0.07143135368824005, -0.08123534172773361, 0.07481357455253601, 0.07094445824623108, 0.03540698066353798, -0.07605970650911331, -0.06422561407089233, 0.01521639060229063, 0.04904880374670029, -0.07783511281013489, 0.09518914669752121, 0.06997374445199966, -0.1043955460190773, 0.10364492982625961, -0.009913831017911434, 0.12630710005760193, 0.06002264842391014, 0.03085692599415779, -0.09882940351963043, -0.011716772802174091, 0.05432572960853577, 0.0351908802986145, 0.08607960492372513, -0.05839230865240097, 0.031521763652563095, 0.08011090010404587, -0.001091470243409276, 0.04485992714762688, -0.05743144825100899, 0.039079952985048294, 0.03949839994311333, 
0.009357656352221966, 0.04616094380617142, -0.0021237374749034643, 0.0021815067157149315, 0.06651142239570618, 0.03641394525766373, 0.10565416514873505, -0.011875705793499947, -0.04259561002254486, -0.09883205592632294, 0.13661211729049683, -0.08320711553096771, -0.3105086386203766, -0.13983894884586334, -0.03789975121617317, -0.03761206567287445, -0.01422968041151762, 0.0641208216547966, -0.013307987712323666, -0.09797132760286331, -0.10871554166078568, 0.02673768624663353, 0.0070239827036857605, -0.12468508630990982, -0.05426686257123947, 0.05598519742488861, 0.01297131460160017, -0.16603843867778778, 0.04050382599234581, 0.0460548959672451, -0.04824560508131981, 0.0021074607502669096, 0.0881151631474495, 0.14175400137901306, 0.08033253997564316, 0.055670879781246185, -0.02565320022404194, -0.009368752129375935, 0.18443958461284637, -0.1035224124789238, 0.04005621373653412, 0.11054676026105881, -0.050542548298835754, 0.06499212235212326, 0.17698615789413452, 0.0028678702656179667, -0.11226707696914673, 0.05004672333598137, 0.08514793962240219, -0.0703527107834816, -0.24497783184051514, -0.1047762930393219, -0.014540382660925388, 0.013017377816140652, 0.10694476962089539, 0.05519767478108406, 0.01330918911844492, 0.01796405203640461, -0.11026094108819962, -0.026796482503414154, -0.068147212266922, 0.08756591379642487, 0.05456513538956642, -0.017693430185317993, 0.049607712775468826, -0.038321711122989655, 0.02802722342312336, 0.1163177341222763, 0.054987434297800064, 0.1529371440410614, -0.02605258859694004, 0.17579323053359985, 0.08303626626729965, 0.10138799250125885, -0.05148834362626076, 0.0329238697886467, 0.004202850628644228, 0.06700818240642548, -0.01869957521557808, -0.09665198624134064, -0.056340381503105164, 0.09868898242712021, 0.02048901654779911, -0.06530681997537613, 0.023105621337890625, -0.0531749427318573, 0.05000540241599083, 0.18287190794944763, -0.023386849090456963, -0.14804792404174805, -0.06461326777935028, 0.06540022790431976, 
-0.024664295837283134, -0.0925108939409256, -0.02865646407008171, 0.06594586372375488, -0.13893790543079376, 0.016433632001280785, -0.02662927284836769, 0.07914233207702637, -0.13636447489261627, -0.018270839005708694, -0.02993677742779255, 0.036697763949632645, 0.008351611904799938, 0.12082624435424805, -0.1244986280798912, 0.11508890986442566, 0.013073229230940342, 0.024397773668169975, -0.11379637569189072, 0.0415305569767952, -0.05863577127456665, -0.0494392104446888, 0.14990945160388947, -0.015311185270547867, -0.08783277124166489, -0.04793000966310501, -0.12029841542243958, -0.001580204232595861, 0.08972066640853882, -0.11363939940929413, 0.10981662571430206, 0.028997575864195824, -0.01802789978682995, -0.02173241600394249, -0.01762707717716694, -0.1296953707933426, -0.22512109577655792, 0.11787880212068558, -0.10918762534856796, 0.0516975037753582, -0.04996545612812042, -0.03675650432705879, -0.0483192577958107, 0.19553086161613464, -0.10193735361099243, -0.04943552613258362, -0.11800819635391235, 0.040228698402643204, 0.1810000240802765, -0.04265958070755005, 0.04815853759646416, -0.050843458622694016, 0.18277263641357422, -0.0018408552277833223, -0.0472724474966526, 0.0002830991870723665, -0.09858141094446182, -0.1606285721063614, -0.053036563098430634, 0.14396102726459503, 0.06036796793341637, 0.02230973169207573, 0.0041917250491678715, 0.041639264672994614, 0.018371352925896645, -0.0887787714600563, 0.03256016597151756, 0.0972326472401619, 0.11681978404521942, 0.02892153523862362, -0.035119157284498215, -0.08880726993083954, -0.10780732333660126, -0.09690656512975693, 0.07240161299705505, 0.14317026734352112, -0.06555456668138504, 0.1599130630493164, 0.13857360184192657, -0.10423128306865692, -0.20168109238147736, -0.03793110325932503, 0.029546644538640976, -0.03432386741042137, 0.11121416836977005, -0.1889529675245285, 0.0721229538321495, 0.05681837722659111, -0.010041818954050541, 0.0792301744222641, -0.24757440388202667, -0.1354418843984604, 
0.01891457848250866, 0.03356873616576195, -0.2212035059928894, -0.1828223019838333, -0.10668692737817764, -0.026473185047507286, -0.14592021703720093, 0.14314450323581696, -0.009372285567224026, 0.03469718247652054, -0.009193762205541134, 0.07265808433294296, 0.05724557489156723, -0.068813256919384, 0.13599516451358795, -0.0016642988193780184, 0.013491316698491573, -0.1022409200668335, -0.0189885962754488, 0.022606387734413147, -0.051693737506866455, 0.10693914443254471, 0.05338646098971367, 0.05682580545544624, -0.07032100111246109, -0.03644572198390961, -0.05909629166126251, 0.04494346305727959, -0.06909392029047012, -0.05762362852692604, -0.07339222729206085, 0.08312752097845078, 0.08819203078746796, -0.0137638496235013, 0.02211768552660942, -0.0334630012512207, 0.03019062988460064, 0.21831195056438446, 0.1239248514175415, 0.024074481800198555, -0.12012370675802231, -0.015370101667940617, -0.01414231676608324, -0.012787931598722935, -0.15076585114002228, 0.03461429476737976, 0.09090805798768997, 0.04603104293346405, 0.08192428946495056, -0.029886525124311447, -0.19389453530311584, -0.00808931514620781, 0.0750979632139206, -0.10895529389381409, -0.22625704109668732, 0.034973274916410446, 0.1537632942199707, -0.16408328711986542, -0.05632227286696434, 0.09400624781847, 0.016774004325270653, -0.029552696272730827, -0.0028736405074596405, 0.08111768215894699, 0.05873925983905792, 0.08487050980329514, 0.009713994339108467, 0.051278937608003616, -0.07508790493011475, 0.11389384418725967, 0.1472843736410141, -0.14419247210025787, 0.03943975269794464, 0.05332460626959801, -0.0514046810567379, -0.06491649150848389, 0.011519144289195538, -0.006871141493320465, 0.01703832298517227, -0.040928345173597336, 0.02125663496553898, -0.0029346970841288567, 0.038799263536930084, 0.1298958957195282, 0.007382566574960947, 0.0323895625770092, 0.0335819385945797, -0.003315214067697525, -0.09424365311861038, 0.09799019992351532, 0.024142159149050713, 0.042393289506435394, 
-0.03709618002176285, 0.018352437764406204, 0.02446955442428589, -0.0068280636332929134, 0.01454748585820198, -0.03816522657871246, -0.05288923531770706, -0.005484253633767366, -0.13726526498794556, 0.030104022473096848, -0.07236343622207642, -0.004380146972835064, -0.0026903811376541853, -0.02410254254937172, -0.021079719066619873, 0.01510945986956358, -0.056924838572740555, -0.06125430017709732, -0.0430455207824707, 0.12123388797044754, -0.19174352288246155, -0.009149201214313507, 0.09714748710393906, -0.0651821568608284, 0.0734967514872551, -0.0018575332360342145, -0.02064160630106926, 0.014615398831665516, -0.08458371460437775, -0.01118528749793768, -0.016413185745477676, 0.05647963285446167, 0.01772860437631607, -0.16333678364753723, -0.020155832171440125, 0.01572076603770256, -0.06952723115682602, -0.005446710158139467, 0.06481646001338959, -0.15245993435382843, 0.011496425606310368, 0.06052115932106972, -0.0365469865500927, -0.04739460349082947, 0.04110301285982132, 0.04945273697376251, 0.004534102510660887, 0.08661727607250214, -0.0004904974484816194, 0.043368127197027206, -0.15580050647258759, -0.05133853107690811, -0.008766700513660908, 0.008635447360575199, 0.016499921679496765, 0.028109179809689522, 0.039263952523469925, -0.005044909194111824, 0.21809910237789154, -0.017783677205443382, 0.08615469932556152, 0.038890041410923004, 0.008113552816212177, -0.042538248002529144, 0.023462381213903427, 0.02805100381374359, 0.00963604636490345, 0.01943102665245533, 0.028651485219597816, -0.011527116410434246, -0.04344754293560982, -0.060465939342975616, 0.06511448323726654, 0.1513395607471466, 0.16027042269706726, -0.0360025130212307, 0.06828565150499344, -0.16332875192165375, -0.05383340269327164, 0.05470910295844078, -0.03562713786959648, 0.04807056114077568, -0.0727342888712883, 0.057384662330150604, 0.0711093321442604, -0.09839951992034912, 0.13714170455932617, -0.07501786202192307, -0.04615781456232071, -0.02411157824099064, -0.12363508343696594, 
-0.05225712060928345, 0.018749738112092018, 0.012162926606833935, -0.10572332888841629, 0.10020130127668381, 0.12036079168319702, -0.019263792783021927, -0.016276707872748375, 0.11440063267946243, -0.06524074822664261, -0.058463845402002335, -0.030033089220523834, 0.008496604859828949, 0.017023179680109024, -0.007309925742447376, 0.07583478093147278, 0.01672155037522316, 0.08168572932481766, 0.06934671849012375, 0.10297957807779312, 0.05727691203355789, 0.00857345201075077, -0.0431806705892086, -0.07569825649261475, -0.005970050115138292, -0.004193502478301525, -0.04290444403886795, 0.20797660946846008, 0.04481835663318634, 0.014484208077192307, 0.004400022327899933, 0.19647864997386932, 0.0042305272072553635, -0.055914830416440964, -0.12771964073181152, 0.11690501123666763, -0.00876048393547535, 0.013995908200740814, 0.031712181866168976, -0.1392160803079605, 0.027920659631490707, 0.172599658370018, 0.098775215446949, 0.0326998233795166, 0.004010149743407965, 0.03033759631216526, 0.02532615140080452, -0.0335240513086319, 0.03400601074099541, 0.04299025610089302, 0.17156347632408142, -0.06589607894420624, 0.05442913994193077, -0.010009564459323883, -0.020790360867977142, -0.02138272114098072, 0.0860341265797615, -0.04133877530694008, 0.02786213532090187, -0.05678139999508858, 0.10078827291727066, -0.035361919552087784, -0.28451067209243774, -0.013833045959472656, -0.11556835472583771, -0.12395928800106049, -0.014429700560867786, 0.03980584815144539, -0.03804058954119682, 0.038114964962005615, 0.039967868477106094, -0.02006569132208824, 0.18974480032920837, 0.012304021045565605, -0.08742797374725342, -0.04682772979140282, 0.06121784448623657, -0.03296884521842003, 0.2617361545562744, -0.005760982166975737, 0.0656910315155983, 0.10200471431016922, -0.019621284678578377, -0.16456741094589233, 0.010273091495037079, 0.11241399496793747, -0.036236658692359924, 0.061866823583841324, 0.1810114085674286, -0.025444049388170242, 0.1337292194366455, 0.05497610196471214, 
-0.04850875958800316, 0.04572666063904762, 0.07130097597837448, 0.045599315315485, -0.1016303300857544, 0.07926887273788452, -0.09146856516599655, 0.1469000279903412, 0.10784319043159485, -0.03204574063420296, -0.006264891941100359, -0.0720362514257431, 0.05748880282044411, -0.024975629523396492, 0.11719419062137604, -0.0034340249840170145, -0.16951021552085876, 0.03988641873002052, 0.0271251630038023, 0.06039762496948242, -0.23449201881885529, -0.07303943485021591, 0.14059162139892578, -0.03815033659338951, -0.0017661204328760505, 0.08951477706432343, 0.03459857404232025, 0.005100721027702093, -0.06919447332620621, -0.08195112645626068, -0.0028281481936573982, 0.12165005505084991, -0.09219473600387573, -0.03984210267663002 ]
73646e4fe3aa279be5f82d9b4ef8e10afa75cc45
# SentiMP-En Dataset The SentiMP-En Dataset is an English sentiment analysis dataset based on tweets written by members of parliament in the United Kingdom in 2021. It has been developed collaboratively by the [Andalusian Research Institute in Data Science and Computational Intelligence (DaSCI)](https://dasci.es/) research group from the [University of Granada](https://www.ugr.es/), the [SINAI](https://sinai.ujaen.es/) research group from the [University of Jaén](https://www.ujaen.es/) and the [Cardiff NLP](https://sites.google.com/view/cardiffnlp/) research group from the [University of Cardiff](https://isc.cardiff.ac.uk/). <div align="center", style="text-align:center; display:block"> <img style="float:left; padding-right:10px" src="https://dasci.es/wp-content/uploads/2018/12/DaSCI_logo_vertical.png" alt="DaSCI" width="150"/> <img style="float:left; padding-right:10px" src="https://www.ujaen.es/gobierno/viccom/sites/gobierno_viccom/files/uploads/inline-images/Marca%20Tradicional.png" alt="UJAEN" width="175"/> <img style="float:left;" src="https://upload.wikimedia.org/wikipedia/commons/e/ef/Cardiff_University_%28logo%29.svg" alt="Cardiff" width="125"/> </div> <div style="clear:both"></div> ## Dataset details The dataset contains 500 tweets in English. For each tweet we provide the following information: * **full_text**: Contains the content of the tweet. * **fold**: Proposed partitions \{0,1,2,3,4\} in 5 folds for 5-fold cross-validation for the sake of reproducibility. * **label_i** : Annotator's i label (i in \{1,2,3\}). It takes values in \{-1,0,1\}. * **majority_vote**: The result after applying the majority vote strategy to the annotators' partial labelling. When there is a tie we use the label "TIE". It takes values in \{-1,0,1,TIE\}. * **tie_break**: We use this column to break ties in cases where there is a tie. Therefore, it is only completed when TIE appears in the *majority_vote* column. It takes values in \{-1,0,1\}. 
* **gold_label**: It represents the final label. It is a combination of the *majority_vote* and the *tie_break* columns. It takes values in \{-1,0,1\}. ## Citation If you use this dataset, please cite: ## Contact Nuria Rodríguez Barroso - [email protected] ## Acknowledgements This work was partly supported by the grants PID2020-119478GB-I00, PID2020-116118GA-I00 and TED2021-130145B-I00 funded by MCIN/AEI/10.13039/501100011033 of the Spanish Government. Shield: [![CC BY-SA 4.0][cc-by-sa-shield]][cc-by-sa] This work is licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License][cc-by-sa]. [![CC BY-SA 4.0][cc-by-sa-image]][cc-by-sa] [cc-by-sa]: http://creativecommons.org/licenses/by-sa/4.0/ [cc-by-sa-image]: https://licensebuttons.net/l/by-sa/4.0/88x31.png [cc-by-sa-shield]: https://img.shields.io/badge/License-CC%20BY--SA%204.0-lightgrey.svg
rbnuria/SentiMP-En
[ "task_categories:text-classification", "size_categories:n<1K", "language:en", "license:cc-by-sa-4.0", "code", "region:us" ]
2024-01-25T10:31:27+00:00
{"language": ["en"], "license": "cc-by-sa-4.0", "size_categories": ["n<1K"], "task_categories": ["text-classification"], "tags": ["code"]}
2024-02-01T08:56:32+00:00
[]
[ "en" ]
TAGS #task_categories-text-classification #size_categories-n<1K #language-English #license-cc-by-sa-4.0 #code #region-us
# SentiMP-En Dataset The SentiMP-En Dataset is an english sentiment analysis dataset based on tweets written by members of parliament in United Kingdom in 2021. It has been developed collaboratively by the Andalusian Research Institute in Data Science and Computational Intelligence (DaSCI) research group from the University of Granada, the SINAI research group from the University of Jaén and the Cardiff NLP research group from the University of Cardiff. <div align="center", style="text-align:center; display:block"> <img style="float:left; padding-right:10px" src="URL alt="DaSCI" width="150"/> <img style="float:left; padding-right:10px" src="URL alt="UJAEN" width="175"/> <img style="float:left;" src="URL alt="Cardiff" width="125"/> </div> <div style="clear:both"></div> ## Dataset details The dataset containst 500 tweets in English. For each tweet we provide the following information: * full_text: Which containts the content of the tweet. * fold: Proposed partitions \{0,1,2,3,4\} in 5 folds for 5 fold cross-validation for the sake of reproducibility. * label_i : Annotator's i label (i in \{1,2,3\}). It takes values in \{-1,0,1\}. * majority_vote: The result after applying the majority vote strategy to the annotators' partial labelling. When there is a tie we use the label "TIE". It takes values in \{-1,0,1,TIE\}. * tie_break: We use this column to break ties in cases where there is a tie. Therefore, it is only completed when TIE appears in the *majority_vote* column. It takes values in \{-1,0,1\}. * gold_label: It represents the final label. It is a combination between the *majority_vote* abd the *tie_break* columns. It takes values in \{-1,0,1\}. If you use this dataset, please cite: ## Contact Nuria Rodríguez Barroso - rbnuria@URL ## Acknowledgements This work was partly supported by the grants PID2020-119478GB-I00, PID2020-116118GA-I00 and TED2021-130145B-I00 funded by MCIN/AEI/10.13039/501100011033 of the Spanish Government. 
Shield: [![CC BY-SA 4.0][cc-by-sa-shield]][cc-by-sa] This work is licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License][cc-by-sa]. [![CC BY-SA 4.0][cc-by-sa-image]][cc-by-sa] [cc-by-sa]: URL [cc-by-sa-image]: URL [cc-by-sa-shield]: URL
[ "# SentiMP-En Dataset\n\nThe SentiMP-En Dataset is an english sentiment analysis dataset based on tweets written by members of parliament in United Kingdom in 2021. It has been developed collaboratively by the Andalusian Research Institute in Data Science and Computational Intelligence (DaSCI) research group from the University of Granada, the SINAI research group from the University of Jaén and the Cardiff NLP research group from the University of Cardiff.\n\n<div align=\"center\", style=\"text-align:center; display:block\">\n<img style=\"float:left; padding-right:10px\" src=\"URL alt=\"DaSCI\" width=\"150\"/>\n<img style=\"float:left; padding-right:10px\" src=\"URL alt=\"UJAEN\" width=\"175\"/>\n<img style=\"float:left;\" src=\"URL alt=\"Cardiff\" width=\"125\"/>\n</div>\n<div style=\"clear:both\"></div>", "## Dataset details\n\nThe dataset containst 500 tweets in English. For each tweet we provide the following information:\n* full_text: Which containts the content of the tweet.\n* fold: Proposed partitions \\{0,1,2,3,4\\} in 5 folds for 5 fold cross-validation for the sake of reproducibility.\n* label_i : Annotator's i label (i in \\{1,2,3\\}). It takes values in \\{-1,0,1\\}.\n* majority_vote: The result after applying the majority vote strategy to the annotators' partial labelling. When there is a tie we use the label \"TIE\". It takes values in \\{-1,0,1,TIE\\}.\n* tie_break: We use this column to break ties in cases where there is a tie. Therefore, it is only completed when TIE appears in the *majority_vote* column. It takes values in \\{-1,0,1\\}.\n* gold_label: It represents the final label. It is a combination between the *majority_vote* abd the *tie_break* columns. 
It takes values in \\{-1,0,1\\}.\n\n\nIf you use this dataset, please cite:", "## Contact\nNuria Rodríguez Barroso - rbnuria@URL", "## Acknowledgements\n\nThis work was partly supported by the grants PID2020-119478GB-I00, PID2020-116118GA-I00 and TED2021-130145B-I00 funded by MCIN/AEI/10.13039/501100011033 of the Spanish Government. \n\nShield: [![CC BY-SA 4.0][cc-by-sa-shield]][cc-by-sa]\n\nThis work is licensed under a\n[Creative Commons Attribution-ShareAlike 4.0 International License][cc-by-sa].\n\n[![CC BY-SA 4.0][cc-by-sa-image]][cc-by-sa]\n\n[cc-by-sa]: URL\n[cc-by-sa-image]: URL\n[cc-by-sa-shield]: URL" ]
[ "TAGS\n#task_categories-text-classification #size_categories-n<1K #language-English #license-cc-by-sa-4.0 #code #region-us \n", "# SentiMP-En Dataset\n\nThe SentiMP-En Dataset is an english sentiment analysis dataset based on tweets written by members of parliament in United Kingdom in 2021. It has been developed collaboratively by the Andalusian Research Institute in Data Science and Computational Intelligence (DaSCI) research group from the University of Granada, the SINAI research group from the University of Jaén and the Cardiff NLP research group from the University of Cardiff.\n\n<div align=\"center\", style=\"text-align:center; display:block\">\n<img style=\"float:left; padding-right:10px\" src=\"URL alt=\"DaSCI\" width=\"150\"/>\n<img style=\"float:left; padding-right:10px\" src=\"URL alt=\"UJAEN\" width=\"175\"/>\n<img style=\"float:left;\" src=\"URL alt=\"Cardiff\" width=\"125\"/>\n</div>\n<div style=\"clear:both\"></div>", "## Dataset details\n\nThe dataset containst 500 tweets in English. For each tweet we provide the following information:\n* full_text: Which containts the content of the tweet.\n* fold: Proposed partitions \\{0,1,2,3,4\\} in 5 folds for 5 fold cross-validation for the sake of reproducibility.\n* label_i : Annotator's i label (i in \\{1,2,3\\}). It takes values in \\{-1,0,1\\}.\n* majority_vote: The result after applying the majority vote strategy to the annotators' partial labelling. When there is a tie we use the label \"TIE\". It takes values in \\{-1,0,1,TIE\\}.\n* tie_break: We use this column to break ties in cases where there is a tie. Therefore, it is only completed when TIE appears in the *majority_vote* column. It takes values in \\{-1,0,1\\}.\n* gold_label: It represents the final label. It is a combination between the *majority_vote* abd the *tie_break* columns. 
It takes values in \\{-1,0,1\\}.\n\n\nIf you use this dataset, please cite:", "## Contact\nNuria Rodríguez Barroso - rbnuria@URL", "## Acknowledgements\n\nThis work was partly supported by the grants PID2020-119478GB-I00, PID2020-116118GA-I00 and TED2021-130145B-I00 funded by MCIN/AEI/10.13039/501100011033 of the Spanish Government. \n\nShield: [![CC BY-SA 4.0][cc-by-sa-shield]][cc-by-sa]\n\nThis work is licensed under a\n[Creative Commons Attribution-ShareAlike 4.0 International License][cc-by-sa].\n\n[![CC BY-SA 4.0][cc-by-sa-image]][cc-by-sa]\n\n[cc-by-sa]: URL\n[cc-by-sa-image]: URL\n[cc-by-sa-shield]: URL" ]
[ 44, 229, 292, 14, 175 ]
[ "passage: TAGS\n#task_categories-text-classification #size_categories-n<1K #language-English #license-cc-by-sa-4.0 #code #region-us \n# SentiMP-En Dataset\n\nThe SentiMP-En Dataset is an english sentiment analysis dataset based on tweets written by members of parliament in United Kingdom in 2021. It has been developed collaboratively by the Andalusian Research Institute in Data Science and Computational Intelligence (DaSCI) research group from the University of Granada, the SINAI research group from the University of Jaén and the Cardiff NLP research group from the University of Cardiff.\n\n<div align=\"center\", style=\"text-align:center; display:block\">\n<img style=\"float:left; padding-right:10px\" src=\"URL alt=\"DaSCI\" width=\"150\"/>\n<img style=\"float:left; padding-right:10px\" src=\"URL alt=\"UJAEN\" width=\"175\"/>\n<img style=\"float:left;\" src=\"URL alt=\"Cardiff\" width=\"125\"/>\n</div>\n<div style=\"clear:both\"></div>" ]
[ -0.08652948588132858, 0.037782661616802216, -0.003472451353445649, 0.06038687750697136, 0.06716787070035934, 0.05733933299779892, 0.14314968883991241, 0.06603218615055084, 0.13725145161151886, 0.10135067254304886, 0.029591478407382965, 0.049706049263477325, 0.08907028287649155, 0.035473961383104324, 0.03602922707796097, -0.18715864419937134, 0.01425969135016203, -0.0455162487924099, -0.07890133559703827, 0.08904973417520523, 0.11312998086214066, -0.08382976800203323, 0.11869393289089203, -0.029441412538290024, -0.0725729838013649, 0.009199822321534157, -0.10513662546873093, -0.0232401080429554, 0.09018345177173615, 0.029408147558569908, 0.10450994968414307, 0.05597510188817978, -0.039634138345718384, -0.07060398161411285, 0.04887492209672928, 0.013326984830200672, -0.031819332391023636, 0.0670570656657219, 0.1396845579147339, -0.0284437108784914, 0.3132416307926178, -0.05057802051305771, 0.02423526532948017, 0.00027457321994006634, -0.10560636967420578, -0.09218524396419525, -0.07999265193939209, -0.017094524577260017, -0.014498992823064327, -0.0032136912923306227, -0.04253266751766205, 0.038980863988399506, -0.10186005383729935, 0.06086575984954834, 0.059523407369852066, -0.1111811101436615, -0.021941905841231346, -0.010180637240409851, 0.012426692061126232, 0.1577594131231308, -0.045349009335041046, 0.012376583181321621, -0.029196470975875854, 0.03840864449739456, 0.002476906171068549, -0.04771186783909798, -0.14698345959186554, -0.02727600559592247, -0.08958246558904648, -0.014082391746342182, 0.37668904662132263, 0.013865663670003414, -0.015803497284650803, -0.07542949914932251, -0.005835482385009527, 0.11079835146665573, -0.0326656848192215, -0.045313816517591476, -0.024627940729260445, 0.028060374781489372, 0.04177308455109596, 0.017692741006612778, -0.10701719671487808, 0.05219241604208946, -0.14525803923606873, 0.12322565913200378, -0.028667377308011055, 0.03629071265459061, -0.015521090477705002, 0.040285851806402206, 0.01019464060664177, 
-0.11393070966005325, -0.013059766963124275, -0.060773931443691254, 0.09568329155445099, -0.027061382308602333, -0.00893956609070301, -0.17029818892478943, 0.1087736114859581, 0.023425549268722534, -0.06425449252128601, 0.02316448464989662, -0.016163595020771027, 0.031056903302669525, 0.1483651101589203, 0.09018968045711517, -0.1403178721666336, 0.028684036806225777, 0.04639969393610954, 0.0242026224732399, 0.052068665623664856, 0.021840501576662064, -0.04968372732400894, 0.010990746319293976, -0.09072669595479965, -0.040166452527046204, 0.04581676051020622, 0.0914195254445076, -0.18111413717269897, -0.058583978563547134, 0.17966322600841522, -0.05337494984269142, 0.04525507986545563, 0.07846038043498993, -0.07078661024570465, 0.14789050817489624, 0.05136058107018471, -0.02220141515135765, 0.0002870654279831797, 0.017624780535697937, -0.026860244572162628, 0.01046539656817913, -0.013607686385512352, -0.13727930188179016, 0.1271863877773285, -0.033919401466846466, -0.01091834343969822, -0.14193589985370636, -0.12930911779403687, -0.07722294330596924, 0.031693700700998306, -0.05107102170586586, 0.0972430408000946, -0.12336846441030502, 0.021026985719799995, 0.028849918395280838, 0.02943071909248829, -0.10314560681581497, -0.06199871748685837, 0.0889042466878891, -0.07841049879789352, 0.1123146191239357, -0.17891736328601837, 0.03063538670539856, -0.1589910089969635, 0.013799184001982212, -0.02517095021903515, 0.067509226500988, -0.04800563305616379, 0.1579432189464569, -0.05434490740299225, 0.023143820464611053, -0.09218674898147583, 0.036493729799985886, 0.09381153434515, 0.14682400226593018, -0.2353472113609314, -0.05785254016518593, 0.009346302598714828, -0.054126519709825516, -0.1649872064590454, 0.175840362906456, -0.005281023681163788, 0.1987411081790924, 0.10381581634283066, 0.07694848626852036, 0.09394901990890503, -0.16404740512371063, -0.1207362711429596, -0.03971989080309868, -0.0882253348827362, -0.05762418359518051, 0.03725099563598633, 
0.013727298006415367, 0.034697387367486954, 0.08107838034629822, 0.05030469223856926, 0.11256212741136551, -0.026992930099368095, -0.011719275265932083, 0.011690298095345497, 0.0068450747057795525, 0.03158290684223175, 0.0571720227599144, -0.0373646505177021, -0.12141306698322296, -0.022479582577943802, -0.1100875735282898, 0.054187871515750885, -0.039270855486392975, -0.00914884265512228, -0.05961453169584274, 0.23142148554325104, -0.0547916404902935, 0.0784546434879303, -0.15882021188735962, -0.03945418447256088, 0.012118479236960411, 0.12516093254089355, 0.08121151477098465, 0.03568581864237785, 0.02490968629717827, -0.047069042921066284, 0.008923578076064587, 0.00857976172119379, -0.013120586052536964, -0.04637600854039192, -0.022304421290755272, -0.215940922498703, 0.10188434273004532, -0.04528908059000969, 0.04348126798868179, -0.09749545156955719, 0.016798363998532295, 0.16826209425926208, 0.12012741714715958, 0.04502332955598831, 0.017184818163514137, 0.04123711958527565, 0.0695168748497963, -0.0813831314444542, 0.011361830867826939, 0.019372032955288887, -0.014812441542744637, -0.010404097847640514, 0.019170746207237244, -0.06773144751787186, -0.07553140819072723, 0.15193170309066772, -0.15954236686229706, -0.002331105060875416, 0.07377656549215317, -0.03782801330089569, 0.06738393753767014, -0.05353698506951332, 0.008294663392007351, 0.011509533040225506, 0.014918377622961998, 0.0881735160946846, -0.08556117117404938, -0.027285313233733177, 0.007809730246663094, -0.04955139756202698, -0.10776782780885696, 0.15039750933647156, 0.10297228395938873, -0.11338147521018982, 0.07564281672239304, 0.14044195413589478, -0.008870732970535755, 0.19261948764324188, 0.011947764083743095, -0.06197647750377655, -0.03719109669327736, -0.01473852340131998, -0.03229880332946777, 0.08871796727180481, -0.03530615568161011, -0.007795026525855064, 0.05988802760839462, -0.03441054746508598, -0.029660455882549286, -0.09956385940313339, -0.003354978282004595, 
-0.006542395334690809, -0.0008776261238381267, -0.09478576481342316, 0.05136891454458237, 0.007482184562832117, 0.11872027814388275, -0.0156546663492918, -0.04302974045276642, -0.03790774941444397, -0.014202732592821121, -0.09937596321105957, 0.06891933083534241, -0.12220556288957596, -0.30467531085014343, -0.0642704963684082, 0.03578053042292595, -0.02845240756869316, 0.02541026845574379, 0.03136836364865303, -0.11416341364383698, -0.06393485516309738, -0.09165395796298981, 0.029276864603161812, 0.03104468062520027, -0.029242563992738724, -0.06900829821825027, 0.052912745624780655, -0.03176172822713852, -0.047871656715869904, 0.02038549818098545, -0.08826538920402527, 0.1687355935573578, 0.07821567356586456, -0.10578005015850067, 0.15204767882823944, 0.05072487145662308, -0.004892565775662661, 0.017492305487394333, -0.013364226557314396, 0.23395265638828278, -0.0927862823009491, 0.07666494697332382, 0.0399884395301342, 0.046113308519124985, 0.04285028949379921, 0.23652131855487823, 0.04480087012052536, -0.10736704617738724, -0.008308250457048416, -0.09506039321422577, -0.06381680071353912, -0.17690159380435944, -0.10086647421121597, -0.1151549220085144, 0.011591397225856781, 0.037398796528577805, 0.03643046319484711, -0.0447874590754509, 0.10047801584005356, -0.022473052144050598, 0.042845916002988815, 0.0003798399702645838, 0.08044110983610153, 0.2079017609357834, 0.014967772178351879, 0.01000895444303751, -0.10865925252437592, -0.038098473101854324, 0.13219352066516876, 0.014488873071968555, 0.011895927600562572, -0.030417300760746002, 0.1523972451686859, 0.02403484284877777, 0.13397344946861267, -0.00682038115337491, 0.057742755860090256, -0.030361013486981392, 0.0498015321791172, -0.04270019009709358, -0.04234267771244049, -0.12948568165302277, 0.027171410620212555, 0.03631297126412392, -0.021390698850154877, 0.004772169515490532, -0.025968337431550026, 0.11882177740335464, -0.016091108322143555, 0.007292744237929583, -0.26407402753829956, 
-0.07778239995241165, 0.037543170154094696, 0.12293568253517151, -0.07145556807518005, -0.05223512277007103, 0.11707641929388046, 0.01967773213982582, 0.05395512282848358, 0.033912088721990585, 0.09089209884405136, -0.11004754155874252, -0.01811053603887558, -0.02011820301413536, 0.08192507922649384, -0.021608782932162285, 0.026929374784231186, -0.20401757955551147, 0.1316339671611786, 0.06025666743516922, 0.05466577783226967, -0.08281344920396805, -0.018034638836979866, -0.007302038837224245, 0.16828763484954834, 0.03982972726225853, 0.035707611590623856, -0.043225765228271484, -0.1744604855775833, -0.05541833117604256, 0.02887292578816414, 0.06497099995613098, -0.04360673949122429, 0.03965674340724945, 0.04564892128109932, -0.03735574334859848, -0.011837128549814224, -0.1509980857372284, -0.05120107904076576, -0.12950743734836578, 0.03785116598010063, 0.11731572449207306, -0.018418889492750168, 0.060865603387355804, -0.06616318225860596, -0.15206097066402435, 0.06250504404306412, -0.15037569403648376, -0.09175822883844376, -0.146044060587883, 0.09687233716249466, 0.009660039097070694, -0.046418655663728714, -0.1032843366265297, -0.017699914053082466, -0.039820753037929535, -0.0049587516114115715, -0.11557990312576294, 0.13254916667938232, -0.1096481904387474, -0.0642404556274414, -0.14581812918186188, 0.13330724835395813, 0.06810043752193451, -0.004304264672100544, -0.013754479587078094, 0.0531047023832798, -0.06490852683782578, -0.047288961708545685, 0.041497014462947845, 0.15160000324249268, -0.04975293204188347, 0.07769591361284256, -0.030248243361711502, -0.06695030629634857, -0.012812178581953049, -0.0258034635335207, 0.030192021280527115, 0.02106359601020813, -0.02765021286904812, 0.06296579539775848, 0.15329581499099731, -0.06467487663030624, -0.1682397574186325, -0.032632868736982346, -0.0002408664586255327, 0.06686126440763474, -0.002043344546109438, -0.15684747695922852, 0.004907824099063873, 0.02877132035791874, 0.022405726835131645, 
0.06449411809444427, -0.18062055110931396, -0.08918287605047226, 0.01920163631439209, 0.05859304592013359, 0.1460525244474411, -0.057328350841999054, -0.06727134436368942, -0.00933898240327835, -0.03181680291891098, 0.14796213805675507, 0.0400063619017601, 0.02774113416671753, 0.024289270862936974, 0.1156088337302208, 0.04785596951842308, 0.015336931683123112, 0.13956555724143982, -0.052027612924575806, 0.03134624660015106, -0.09238413721323013, -0.017021289095282555, 0.07883995026350021, 0.004327645059674978, 0.1615886390209198, 0.002447937149554491, -0.013422570191323757, -0.1182689443230629, 0.003253870876505971, -0.07573902606964111, 0.014988227747380733, -0.020379554480314255, 0.019933553412556648, -0.1419028341770172, 0.02068319544196129, 0.03887813165783882, 0.013697576709091663, 0.03236677497625351, -0.007587986998260021, -0.05018838495016098, 0.009151832200586796, 0.19727455079555511, 0.012259499169886112, 0.034860484302043915, 0.024113459512591362, -0.026471693068742752, 0.09541822224855423, -0.17621150612831116, -0.0016268020262941718, 0.1554950475692749, -0.03527289256453514, 0.09648590534925461, -0.0013046753592789173, -0.14695331454277039, 0.012852589599788189, 0.14344197511672974, -0.13455936312675476, -0.0464351512491703, -0.01082137506455183, -0.06536895036697388, -0.02707045152783394, -0.01304259616881609, 0.08160269260406494, 0.00964136328548193, -0.006068876013159752, 0.01700708456337452, 0.03701472654938698, 0.052390001714229584, 0.03929181396961212, 0.04117552563548088, -0.07767418026924133, -0.08130907267332077, -0.035663310438394547, 0.1103803887963295, -0.1014883890748024, 0.04540211334824562, 0.0842430517077446, -0.13515020906925201, -0.05869083106517792, -0.0003349303442519158, 0.15324708819389343, -0.02843184396624565, -0.10183749347925186, -0.09542946517467499, -0.08619265258312225, 0.013672278262674809, 0.08088921755552292, 0.03658059611916542, 0.027343103662133217, -0.05856724828481674, 0.008239621296525002, -0.05296063795685768, 
0.1199839636683464, -0.003091382561251521, -0.009026552550494671, -0.03803405910730362, -0.07890665531158447, -0.014949466101825237, -0.01692989654839039, -0.031429473310709, -0.056613489985466, -0.009385556913912296, -0.022926419973373413, 0.033855922520160675, 0.017693059518933296, -0.11132252216339111, 0.0055745188146829605, 0.02318457141518593, -0.08656135946512222, -0.03226777911186218, -0.05829908326268196, -0.05802997946739197, -0.008074823766946793, -0.00036993526737205684, 0.0960225835442543, -0.13754284381866455, -0.07589007169008255, 0.03423435613512993, -0.025521939620375633, 0.058081015944480896, 0.03506233170628548, -0.041454095393419266, -0.008213448338210583, -0.24813391268253326, -0.0016531640430912375, 0.09538009017705917, 0.07484035938978195, 0.04361307993531227, 0.01888127624988556, 0.0051379152573645115, 0.06974489986896515, -0.039465539157390594, 0.015547378920018673, -0.002365518594160676, -0.029230771586298943, 0.09360817074775696, 0.04434845969080925, -0.09881296008825302, -0.05218729004263878, 0.08456883579492569, -0.0015372596681118011, 0.00403578020632267, 0.16310492157936096, -0.09198551625013351, -0.012453923001885414, -0.096471406519413, -0.010539455339312553, 0.05564480647444725, -0.051736682653427124, -0.19158296287059784, -0.08169735968112946, 0.06457938253879547, -0.0424538254737854, 0.10376321524381638, 0.11645253002643585, -0.15064577758312225, 0.009459508582949638, 0.20479702949523926, -0.07466038316488266, 0.009205588139593601, 0.030517781153321266, -0.012578142806887627, -0.010250329039990902, -0.026028865948319435, -0.012672758661210537, 0.07143740355968475, -0.008095587603747845, 0.04103666916489601, 0.014023533090949059, 0.23318494856357574, 0.03045923262834549, 0.07799569517374039, 0.09112440049648285, 0.06808749586343765, 0.045018188655376434, -0.09772476553916931, 0.03638813644647598, 0.027912147343158722, 0.17085276544094086, 0.04355676844716072, -0.10038740932941437, 0.04043658450245857, -0.010550551116466522, 
-0.02775052934885025, 0.007612306624650955, -0.2566560208797455, -0.09150943905115128, -0.1290137767791748, -0.00978982076048851, -0.13315635919570923, -0.048154931515455246, -0.00895007699728012, 0.02219451032578945, -0.04921847581863403, -0.025579098612070084, 0.03173953294754028, -0.09171460568904877, 0.09323599189519882, -0.03760363161563873, 0.013321410864591599, -0.07975270599126816, -0.007146838121116161, -0.015221175737679005, 0.08005783706903458, 0.018762875348329544, 0.09748277068138123, 0.058009542524814606, -0.026672085747122765, -0.12960951030254364, -0.10197806358337402, -0.010223179124295712, 0.037992607802152634, -0.0683739110827446, -0.036944784224033356, 0.07259408384561539, 0.022295184433460236, 0.043983206152915955, 0.06009416654706001, 0.08438461273908615, 0.08500872552394867, -0.11278324574232101, -0.07911652326583862, -0.04919150099158287, 0.027347370982170105, 0.03280489891767502, -0.12902763485908508, -0.005451047793030739, 0.13266880810260773, 0.18778648972511292, -0.09192220121622086, -0.016399020329117775, -0.06180857494473457, 0.02752210758626461, 0.0037284004501998425, -0.010518193244934082, 0.05456773564219475, 0.15496544539928436, 0.0028528564143925905, -0.03378075361251831, -0.06301620602607727, 0.0037392403464764357, -0.004186910577118397, -0.06362169981002808, 0.018603457137942314, -0.06695058941841125, -0.13639076054096222, 0.08915651589632034, -0.024520108476281166, -0.08519741892814636, 0.11355174332857132, -0.11633624136447906, -0.06338587403297424, -0.06295352429151535, 0.02277456223964691, 0.1523192673921585, 0.008198525756597519, -0.06464327126741409, -0.06695092469453812, 0.1619103103876114, -0.021731851622462273, -0.14281204342842102, 0.033105406910181046, 0.08692187070846558, -0.13386951386928558, 0.07012616842985153, 0.005973268300294876, 0.11352960765361786, 0.09571394324302673, -0.030927950516343117, -0.0800967589020729, -0.008855409920215607, 0.05114296078681946, 0.10514745116233826, 0.03519098088145256, 
-0.003275715047493577, 0.06370310485363007, 0.030370010063052177, 0.15865936875343323, 0.043178945779800415, 0.06971795111894608, 0.009246832691133022, -0.04045301303267479, -0.06276094913482666, 0.10632182657718658, -0.0774449035525322, 0.09401040524244308, 0.06736987829208374, 0.011731784790754318, -0.015380952507257462, -0.021756380796432495, -0.020377375185489655, 0.016968252137303352, -0.019129248335957527, -0.014779376797378063, -0.057587526738643646, -0.031347259879112244, 0.07931927591562271, 0.06646083295345306, -0.12009096890687943, -0.024965716525912285, -0.05911426246166229, -0.0033367243595421314, -0.06363508105278015, 0.07924347370862961, 0.06352107971906662, 0.011270402930676937, -0.0509132519364357, -0.002572490368038416, 0.0292213037610054, 0.09234054386615753, -0.002543815178796649, -0.006688833236694336 ]
64595291dc520a934370c68a37f0a485b37cde02
# Dataset Card for "namuwiki_20240125_596861" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
jamesagilesoda/kowiki_20240125_596861
[ "region:us" ]
2024-01-25T10:33:31+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "url", "dtype": "string"}, {"name": "title", "dtype": "string"}, {"name": "lang", "dtype": "string"}, {"name": "date", "dtype": "string"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1031011524, "num_examples": 596861}], "download_size": 569215919, "dataset_size": 1031011524}}
2024-01-25T13:43:06+00:00
[]
[]
TAGS #region-us
# Dataset Card for "namuwiki_20240125_596861" More Information needed
[ "# Dataset Card for \"namuwiki_20240125_596861\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"namuwiki_20240125_596861\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"namuwiki_20240125_596861\"\n\nMore Information needed" ]
[ -0.07479029893875122, 0.2436167150735855, -0.0012027628254145384, 0.01465258002281189, 0.08579190820455551, 0.11401020735502243, 0.10352608561515808, 0.0760132223367691, 0.10292396694421768, 0.034109823405742645, 0.1772221326828003, 0.04819763824343681, 0.1472119837999344, 0.15328706800937653, -0.006717390846461058, -0.08321360498666763, 0.0631575882434845, 0.035101961344480515, 0.011171752586960793, 0.0646805465221405, -0.03687083348631859, -0.05998503789305687, 0.10574525594711304, -0.06186893209815025, -0.22000233829021454, 0.08734015375375748, -0.04923713207244873, -0.05549320951104164, 0.054086461663246155, -0.07014109194278717, 0.17703957855701447, -0.023148687556385994, 0.019456379115581512, -0.1621197611093521, -0.004509972874075174, -0.007451026234775782, -0.0592823326587677, 0.031319987028837204, 0.044931184500455856, -0.01324152946472168, 0.011895574629306793, -0.00641281483694911, -0.029451612383127213, -0.015360455960035324, -0.1563897281885147, -0.17103338241577148, -0.15613208711147308, 0.0296788290143013, 0.013444929383695126, 0.009347506798803806, 0.07686009258031845, 0.17014916241168976, -0.06449245661497116, 0.02666635625064373, 0.06681451201438904, -0.10337317734956741, 0.05321986600756645, 0.1944364756345749, -0.03848423808813095, 0.05096592754125595, -0.0037351802457123995, 0.04994027689099312, 0.10975220054388046, -0.02931356057524681, -0.14796634018421173, 0.007385272067040205, -0.08226867020130157, 0.0734480544924736, 0.010723238810896873, -0.07207605987787247, 0.32689815759658813, 0.06629670411348343, 0.009445657953619957, 0.06473331898450851, -0.0026330130640417337, -0.04116323962807655, 0.012965001165866852, 0.042528100311756134, 0.005844877567142248, 0.010850654914975166, 0.07366492599248886, 0.05195612832903862, -0.08882448822259903, -0.133147731423378, -0.13137611746788025, 0.05069826543331146, -0.038543786853551865, 0.1808326691389084, -0.20610946416854858, -0.082295723259449, -0.0740245133638382, -0.0031663074623793364, 
-0.01452723890542984, -0.06265734881162643, 0.04111894220113754, -0.01980571262538433, 0.031575944274663925, -0.036638032644987106, 0.08745685964822769, -0.0254239309579134, -0.016028473153710365, 0.08544189482927322, -0.03988110274076462, 0.04934653267264366, 0.15636363625526428, -0.08863192796707153, -0.04121091961860657, 0.022731667384505272, -0.07071059197187424, -0.1479903906583786, 0.011035765521228313, -0.10235003381967545, -0.09084292501211166, -0.010310590267181396, -0.11312998831272125, 0.07949966192245483, -0.06453076750040054, -0.10524360090494156, -0.04804260656237602, -0.04839766025543213, 0.22396552562713623, -0.06005753204226494, 0.009127210825681686, 0.024846401065587997, -0.07923801243305206, 0.06908051669597626, -0.09384272247552872, -0.024695103988051414, 0.015886669978499413, 0.08555227518081665, -0.107352115213871, -0.011998875997960567, -0.01989693008363247, 0.01056051068007946, 0.1346910446882248, -0.18050925433635712, 0.08286669105291367, -0.07521475851535797, -0.24650660157203674, 0.026951847597956657, 0.03931620717048645, -0.044547852128744125, 0.17013965547084808, 0.014951003715395927, 0.09093256294727325, 0.021648166701197624, -0.01465623453259468, 0.04923100024461746, -0.06255564838647842, 0.02187078259885311, -0.0041560824029147625, 0.07702451199293137, -0.20493920147418976, 0.009777696803212166, -0.07481011748313904, 0.053479429334402084, 0.09962709248065948, -0.07991420477628708, -0.12782351672649384, 0.04610748961567879, -0.08471241593360901, 0.014772584661841393, -0.10480751842260361, 0.014950500801205635, 0.02520369179546833, 0.07272035628557205, -0.13792364299297333, -0.01323665026575327, 0.24642181396484375, -0.15811088681221008, -0.165960893034935, -0.020558258518576622, 0.008416756056249142, -0.008474629372358322, -0.01632671244442463, 0.31553539633750916, -0.02558860369026661, -0.09908336400985718, -0.04523644596338272, 0.10595149546861649, -0.188918799161911, -0.32265397906303406, 0.05616753548383713, -0.035854872316122055, 
-0.1314789354801178, 0.018698927015066147, 0.20628464221954346, 0.015146942809224129, -0.03274884819984436, -0.08901131898164749, -0.00535435089841485, -0.1455666422843933, 0.03070254437625408, 0.02236473560333252, 0.04328957572579384, -0.020103197544813156, 0.13185392320156097, 0.04090747609734535, 0.11490188539028168, 0.02653418853878975, -0.04509147256612778, -0.07075046747922897, 0.029809333384037018, -0.12670119106769562, -0.026153748854994774, -0.07682449370622635, -0.07579545676708221, 0.0009495404665358365, -0.09891411662101746, -0.00757843442261219, -0.0945209488272667, 0.1057380884885788, -0.02080642245709896, 0.03297269716858864, 0.04470336437225342, 0.09190300852060318, 0.08087631314992905, 0.04975998029112816, 0.049603428691625595, 0.013697030022740364, -0.06455004960298538, -0.09709172695875168, -0.1130777969956398, -0.07197593152523041, 0.0926661565899849, 0.059962425380945206, 0.027369661256670952, -0.05676717311143875, 0.09704829752445221, 0.048485685139894485, 0.04604820907115936, -0.04176801070570946, 0.03217471390962601, -0.05548388883471489, -0.023409152403473854, 0.024843858554959297, -0.055360689759254456, 0.18723681569099426, 0.1236780658364296, -0.028923122212290764, 0.020251300185918808, 0.028324715793132782, 0.06501632928848267, -0.04388720542192459, -0.010619179345667362, 0.0888400673866272, -0.22695252299308777, -0.057110171765089035, 0.04094233363866806, -0.07007749378681183, 0.08611016720533371, 0.07906536012887955, -0.04063311591744423, -0.16648873686790466, 0.05389627441763878, 0.19108812510967255, -0.2869511842727661, 0.16926072537899017, 0.19527573883533478, 0.11708109080791473, 0.1747947335243225, -0.05705919489264488, -0.10249139368534088, -0.017599208280444145, -0.04793738201260567, -0.053619738668203354, 0.16113029420375824, -0.04331235960125923, 0.0038103058468550444, 0.06857603043317795, 0.038844600319862366, 0.09558296948671341, -0.0976414829492569, -0.1470666080713272, -0.010842401534318924, 0.016330119222402573, 
-0.18085522949695587, 0.037658415734767914, -0.03711168095469475, 0.04844686761498451, 0.060396838933229446, 0.04711434245109558, 0.1305098980665207, 0.025591012090444565, -0.008248838596045971, 0.11441973596811295, -0.16011011600494385, -0.2964024245738983, -0.08544640243053436, -0.14824868738651276, 0.028085006400942802, -0.03096229024231434, -0.07320523262023926, -0.22347259521484375, -0.024888483807444572, 0.03615648299455643, -0.13049329817295074, -0.20791420340538025, -0.04546724632382393, 0.09511909633874893, 0.01454327255487442, -0.07995322346687317, -0.06693195551633835, 0.005141738336533308, -0.05118287727236748, 0.12695719301700592, 0.10738294571638107, -0.13844197988510132, 0.14215604960918427, 0.07994358241558075, -0.06014161556959152, 0.11742165684700012, 0.03308594599366188, 0.06907811760902405, -0.039163537323474884, -0.03415701538324356, 0.1322302669286728, 0.0024731457233428955, 0.04298875108361244, 0.0011744056828320026, 0.08106616139411926, -0.14981774985790253, -0.0299675390124321, -0.011394251137971878, -0.1779574602842331, -0.25690940022468567, -0.12407779693603516, -0.03546987846493721, 0.1436605155467987, 0.100594662129879, 0.05612548068165779, 0.003352595027536154, 0.10294061154127121, 0.1760413944721222, -0.036430202424526215, -0.19942377507686615, -0.02036001905798912, -0.0913875624537468, 0.02797771245241165, -0.046242937445640564, -0.1645030826330185, 0.05376916378736496, 0.10839158296585083, 0.2414398193359375, 0.14609333872795105, 0.17142368853092194, 0.11913582682609558, 0.009373234584927559, 0.1779959499835968, 0.11960001289844513, 0.145721897482872, 0.0901370421051979, -0.07429041713476181, 0.049152035266160965, 0.00529356999322772, -0.057563137263059616, 0.008614816702902317, 0.03472448140382767, -0.135165736079216, 0.08139156550168991, -0.10515103489160538, -0.05292772501707077, -0.1741046905517578, 0.11039730906486511, -0.21050910651683807, 0.06455553323030472, -0.03174353390932083, 0.1402801126241684, -0.09969384968280792, 
0.08539516478776932, 0.03202622011303902, -0.04382310435175896, 0.10899852216243744, 0.05152782052755356, 0.032375458627939224, -0.05143124982714653, -0.047014497220516205, -0.08227474987506866, -0.08602578192949295, -0.023738762363791466, 0.09473015367984772, -0.07309459894895554, 0.25424572825431824, 0.04507731646299362, -0.002430945634841919, -0.12147979438304901, -0.07294923812150955, -0.0175890251994133, -0.004686943721026182, 0.1371011734008789, 0.07539300620555878, -0.18959949910640717, -0.17736487090587616, -0.06740099936723709, -0.03811271861195564, 0.13427284359931946, 0.15266801416873932, -0.15332536399364471, 0.07572773844003677, 0.038284074515104294, -0.08189491927623749, -0.0920775830745697, -0.06814194470643997, -0.10502878576517105, 0.00375275663100183, 0.0807204619050026, -0.10252292454242706, 0.04983589053153992, 0.041867177933454514, -0.06573769450187683, 0.05657051131129265, 0.10645955801010132, 0.005767990835011005, -0.09138397872447968, -0.06476598232984543, 0.11784924566745758, -0.01534237340092659, -0.013929128646850586, -0.01636667735874653, -0.07265252619981766, -0.04717806726694107, -0.1883016973733902, 0.0885658860206604, -0.046630460768938065, 0.09267344325780869, -0.06557317078113556, 0.050920918583869934, 0.019815223291516304, -0.03851344808936119, -0.023163503035902977, 0.03519436717033386, -0.011268670670688152, -0.06011148914694786, 0.20597311854362488, -0.07497047632932663, 0.1116078570485115, 0.1489771157503128, 0.15688371658325195, 0.04962380602955818, 0.07625611871480942, -0.10490565747022629, 0.15742550790309906, 0.07586654275655746, -0.02892841026186943, 0.17538177967071533, 0.21550215780735016, -0.02731148712337017, -0.21453069150447845, -0.02492895908653736, -0.15755167603492737, -0.058432016521692276, -0.010294409468770027, -0.23766422271728516, 0.1447969526052475, 0.1873067170381546, -0.030697396025061607, 0.26309892535209656, -0.11804293841123581, 0.02021402306854725, 0.1363014131784439, 0.08989562839269638, 
0.3478189706802368, -0.10277506709098816, -0.056691162288188934, -0.07653753459453583, -0.27708539366722107, 0.29371678829193115, -0.05134129896759987, 0.018221629783511162, -0.05888963118195534, 0.14455299079418182, -0.007568687200546265, -0.0775170624256134, 0.1589476764202118, 0.010485070757567883, 0.07411281764507294, -0.09700873494148254, -0.09606125950813293, 0.12064434587955475, -0.047546226531267166, 0.06310826539993286, 0.005726422183215618, 0.01697353832423687, -0.16813918948173523, -0.010691631585359573, 0.005419616587460041, -0.003609529696404934, 0.036537643522024155, -0.056394170969724655, -0.07003326714038849, 0.025918278843164444, -0.09905688464641571, -0.013100785203278065, 0.15904082357883453, 0.04295797646045685, 0.002376370830461383, 0.09085597097873688, -0.019066056236624718, -0.0654829889535904, 0.07619014382362366, -0.07423170655965805, -0.11968069523572922, 0.11477751284837723, -0.2749488055706024, 0.018329259008169174, 0.08864602446556091, 0.04990188032388687, 0.02134922705590725, 0.014711807481944561, -0.08129668980836868, 0.02236991934478283, 0.12021941691637039, -0.08262903243303299, -0.03931214287877083, 0.09081769734621048, 0.0005703202914446592, 0.08804860711097717, 0.09962812811136246, 0.004305706359446049, 0.026729444041848183, 0.0006297161453403533, -0.04459179937839508, 0.06052980199456215, -0.09855927526950836, 0.037327591329813004, 0.10133031010627747, 0.040673770010471344, -0.15956275165081024, 0.2477676272392273, 0.02886105701327324, -0.10855849832296371, 0.08049160242080688, 0.04483799636363983, -0.08630149811506271, -0.05859724059700966, -0.07059763371944427, 0.18952368199825287, -0.07864698767662048, -0.21902517974376678, 0.08758189529180527, 0.006898774299770594, 0.028723647817969322, 0.13626064360141754, 0.003490159520879388, 0.07555249333381653, 0.03341648355126381, -0.042560771107673645, -0.09848573058843613, -0.13898295164108276, -0.09144716709852219, 0.02111104689538479, -0.12543471157550812, -0.22716236114501953, 
-0.00677020289003849, 0.15956662595272064, -0.0384356752038002, -0.06277009099721909, -0.12214364111423492, 0.07372374832630157, -0.09849093109369278, 0.044401418417692184, -0.07474618405103683, 0.01867152750492096, -0.00237252633087337, 0.018764901906251907, -0.03688173368573189, 0.004005590919405222, -0.10083786398172379, 0.09270866215229034, 0.07096810638904572, -0.01455753855407238, -0.054551348090171814, 0.011393985711038113, 0.07916824519634247, 0.08044717460870743, 0.03658221662044525, 0.11418774724006653, 0.03843855485320091, 0.11307806521654129, -0.0573115274310112, -0.04474036023020744, 0.021504433825612068, 0.031919997185468674, 0.05753554776310921, 0.11803647875785828, -0.031227581202983856, 0.057432934641838074, -0.06330766528844833, 0.0435199998319149, 0.01139745395630598, -0.06633953005075455, 0.00030008776229806244, -0.1224551647901535, -0.11164415627717972, -0.037567541003227234, -0.015659548342227936, 0.22214539349079132, -0.052190087735652924, -0.033348605036735535, 0.08777154237031937, 0.005509728565812111, 0.024245047941803932, -0.04783537611365318, -0.008049976080656052, -0.0969017818570137, -0.025418896228075027, 0.049713827669620514, 0.0011925774160772562, -0.07811371237039566, 0.3680056631565094, -0.10116065293550491, -0.203091561794281, -0.05345363914966583, 0.06782569736242294, -0.17805682122707367, -0.02877848781645298, 0.35624459385871887, 0.059345707297325134, -0.07195296138525009, -0.010038369335234165, 0.06349214166402817, -0.055386029183864594, 0.15365268290042877, 0.039233334362506866, 0.11726623773574829, -0.012248815037310123, -0.011664959602057934, 0.0356915257871151, -0.05090794339776039, -0.03574655205011368, -0.004613512195646763, -0.09831805527210236, 0.020344624295830727, 0.08975140005350113, -0.0855756402015686, 0.08938424289226532, 0.044085171073675156, -0.030245743691921234, -0.0471426285803318, -0.0927860289812088, -0.08292146027088165, -0.18420878052711487, -0.05260130763053894, -0.0756591409444809, 
0.09924960881471634, -0.0026056496426463127, 0.023760322481393814, 0.26503869891166687, 0.08867646753787994, 0.012095502577722073, 0.06539882719516754, 0.02112153358757496, 0.09097134321928024, -0.019327273592352867, -0.021388687193393707, -0.10765126347541809, 0.023787476122379303, -0.06137549504637718, 0.0476590059697628, -0.09957070648670197, -0.017472054809331894, -0.017744680866599083, 0.04050404205918312, 0.019854914397001266, -0.07476187497377396, -0.05352324992418289, -0.05470176786184311, 0.08413185924291611, -0.06225457042455673, 0.04490337520837784, 0.004481770563870668, 0.05674898996949196, 0.01885070651769638, 0.011876212432980537, 0.035262856632471085, 0.07644286006689072, -0.00712616229429841, 0.021584276109933853, -0.05334961786866188, 0.07796153426170349, -0.015931207686662674, -0.03099774196743965, -0.04784170910716057, 0.16881340742111206, 0.16050384938716888, 0.03841648995876312, -0.016288992017507553, 0.025245893746614456, 0.04636489972472191, -0.03256828710436821, 0.16891418397426605, -0.0073257931508123875, -0.014594035223126411, -0.008390741422772408, -0.08920948952436447, -0.041319262236356735, -0.09067603200674057, -0.03072499670088291, 0.033593323081731796, 0.08459111303091049, -0.06509801745414734, -0.11885008960962296, 0.1200098991394043, -0.21536314487457275, 0.13808858394622803, 0.1979464590549469, -0.12680096924304962, -0.09827671945095062, -0.013257570564746857, 0.05417506769299507, 0.017460359260439873, -0.014153411611914635, -0.0831439420580864, -0.02977508306503296, -0.1389421671628952, 0.049717240035533905, -0.36023077368736267, -0.16039113700389862, -0.041037965565919876, 0.022653698921203613, 0.1413601040840149, 0.005670961458235979, 0.0895003229379654, 0.04098799079656601, 0.04629362002015114, -0.09793535619974136, 0.08098502457141876, -0.0008011568570509553, 0.07949772477149963, -0.08299107849597931, 0.0019764688331633806, -0.023652123287320137, -0.05879658833146095, 0.016372069716453552, -0.10244923830032349, 
-0.09525083750486374, 0.22762645781040192, 0.06992756575345993, 0.010556736029684544, 0.017684927210211754, -0.07346925139427185, 0.1347455233335495, 0.005851267836987972, -0.0013007076922804117, -0.039363276213407516, -0.017592065036296844, 0.11737914383411407, 0.007134565617889166, -0.1792551875114441, -0.08305740356445312, 0.012089026160538197, -0.0820217877626419, 0.045911263674497604, 0.04998021945357323, -0.053966015577316284, 0.09280674159526825, -0.09852646291255951, 0.031928375363349915, -0.012106385082006454, 0.03036046400666237, 0.13391248881816864, 0.07382233440876007, -0.03094247169792652, -0.02273283526301384, 0.07391341775655746, 0.01350356824696064, -0.01539328508079052, -0.10655879974365234 ]
ee9c7cd1a2113e37cb3f2b000203b8045cfbdd5d
<p align="center"> <h3 align="center"><a href="https://arxiv.org/abs/2303.14465" target='_blank'> <strong>Equivariant Similarity for Vision-Language Foundation Models</strong> </a></h3> <h2 align="center">ICCV 2023</h2> <p align="center"> <a href="https://scholar.google.com/citations?hl=en&user=wFduC9EAAAAJ" target='_blank'>Tan Wang</a>,&nbsp; <a href="https://scholar.google.com/citations?hl=en&user=LKSy1kwAAAAJ" target='_blank'>Kevin Lin</a>,&nbsp; <a href="https://scholar.google.com/citations?hl=en&user=WR875gYAAAAJ" target='_blank'>Linjie Li</a>,&nbsp; <a href="https://scholar.google.com/citations?hl=en&user=legkbM0AAAAJ" target='_blank'>Chung-Ching Lin</a>,&nbsp; <a href="https://scholar.google.com/citations?hl=en&user=rP02ve8AAAAJ" target='_blank'>Zhengyuan Yang</a>,&nbsp; <a href="https://scholar.google.com/citations?hl=en&user=YG0DFyYAAAAJ" target='_blank'>Hanwang Zhang</a>,&nbsp; <a href="https://scholar.google.com/citations?hl=en&user=bkALdvsAAAAJ" target='_blank'>Zicheng Liu</a>,&nbsp; <a href="https://scholar.google.com/citations?hl=en&user=cDcWXuIAAAAJ" target='_blank'>Lijuan Wang</a> <br> Nanyang Technological University, &nbsp; Microsoft Corporation </p> </p> <br /><br /> # About This study explores the concept of equivariance in vision-language foundation models (VLMs), focusing specifically on the multimodal similarity function that is not only the major training objective but also the core delivery to support downstream tasks. Unlike the existing image-text similarity objective which only categorizes matched pairs as similar and unmatched pairs as dissimilar, equivariance also requires similarity to vary faithfully according to the semantic changes. Our key contributions are three-fold: 1. A novel benchmark named **EqBen** (Equivariant Benchmark) to benchmark VLMs with **visual-minimal change** samples. 2. A plug-and-play regularization loss **EqSim** (Equivariant Similarity Learning) to improve the equivariance of current VLMs. 3. 
Toolkit provides an **one-stop evaluation**: not only for EqBen, but also for previous related benchmarks (Winoground, VALSE, etc).<br> # Data Download - **Download images from huggingface hub:** Please check the Files and versions tab above. - Full-Test Set: the user can download the EqBen raw **[image data](https://drive.google.com/file/d/1e608uhd36ak_v7SnlMVaYcekBc4gBqzn/view?usp=drive_link)** (tar.gz file, ~100G) and [**annotation (after randomize)**](https://drive.google.com/file/d/1-CWEuZ5F0KQ4d94Y9rRtBsMIcqb8V7nm/view?usp=sharing) (200M) via Google Drive. **[UPDATE-2023-09]** The original annotation is the annotation after randomize (non-public) for the total fairness. And the users are required to upload the results json/np file to CodaLab for getting the final results. Due to the unstability of CodaLab, we decide to public the whole original annotation. This annotation file formalized similar to *Winoground* and can be downloaded [**here**](https://drive.google.com/file/d/1gNR4K2Cv4rbnjVRdHuBV6PuoJ5MlRXnZ/view?usp=sharing). - **Light** Full-Test Set: to improve the usability, we also provide a light version of EqBen by converting all the png image to the jpg using `convert`. Feel free to download [here](https://entuedu-my.sharepoint.com/:u:/g/personal/tan317_e_ntu_edu_sg/EcHBRcch6KREvzvGgrN67FMBUSVV4QPTQUiew0bxjcitFw?e=xiJiYL). But please note that you may make some small revisement to the path in the annotation (change the `.png` to `.jpg`). - Sub-Test Set: we also provide a 10% subset (~25K image-text pairs) for the ease of visualization and validation. The label of the EqBen sub-set is **opensource** and the **format follows the winoground style**. But please note that the samples in the subset is **randomly sorted** and not be classified to each category. 
Please down the raw **[image data](https://drive.google.com/file/d/13Iuirsvx34-9F_1Mjhs4Dqn59yokyUjy/view?usp=sharing)** (tar.gz file, ~10G) and [**annotation**](https://drive.google.com/file/d/18BSRf1SnBtGiEc42mzRLirXaBLzYE5Tt/view?usp=sharing) via Google Drive. --- * This is the unofficial distribution of images in the eqben benchmark. * For the official repository, please visit [https://github.com/Wangt-CN/EqBen](https://github.com/Wangt-CN/EqBen). * Some part of this README.md is taken from the official repository
ytaek-oh/eqben-images
[ "license:apache-2.0", "arxiv:2303.14465", "region:us" ]
2024-01-25T10:50:11+00:00
{"license": "apache-2.0"}
2024-01-25T12:51:23+00:00
[ "2303.14465" ]
[]
TAGS #license-apache-2.0 #arxiv-2303.14465 #region-us
<p align="center"> <h3 align="center"><a href="URL target='_blank'> <strong>Equivariant Similarity for Vision-Language Foundation Models</strong> </a></h3> <h2 align="center">ICCV 2023</h2> <p align="center"> <a href="URL target='_blank'>Tan Wang</a>,&nbsp; <a href="URL target='_blank'>Kevin Lin</a>,&nbsp; <a href="URL target='_blank'>Linjie Li</a>,&nbsp; <a href="URL target='_blank'>Chung-Ching Lin</a>,&nbsp; <a href="URL target='_blank'>Zhengyuan Yang</a>,&nbsp; <a href="URL target='_blank'>Hanwang Zhang</a>,&nbsp; <a href="URL target='_blank'>Zicheng Liu</a>,&nbsp; <a href="URL target='_blank'>Lijuan Wang</a> <br> Nanyang Technological University, &nbsp; Microsoft Corporation </p> </p> <br /><br /> # About This study explores the concept of equivariance in vision-language foundation models (VLMs), focusing specifically on the multimodal similarity function that is not only the major training objective but also the core delivery to support downstream tasks. Unlike the existing image-text similarity objective which only categorizes matched pairs as similar and unmatched pairs as dissimilar, equivariance also requires similarity to vary faithfully according to the semantic changes. Our key contributions are three-fold: 1. A novel benchmark named EqBen (Equivariant Benchmark) to benchmark VLMs with visual-minimal change samples. 2. A plug-and-play regularization loss EqSim (Equivariant Similarity Learning) to improve the equivariance of current VLMs. 3. Toolkit provides an one-stop evaluation: not only for EqBen, but also for previous related benchmarks (Winoground, VALSE, etc).<br> # Data Download - Download images from huggingface hub: Please check the Files and versions tab above. - Full-Test Set: the user can download the EqBen raw image data (URL file, ~100G) and annotation (after randomize) (200M) via Google Drive. [UPDATE-2023-09] The original annotation is the annotation after randomize (non-public) for the total fairness. 
And the users are required to upload the results json/np file to CodaLab for getting the final results. Due to the unstability of CodaLab, we decide to public the whole original annotation. This annotation file formalized similar to *Winoground* and can be downloaded here. - Light Full-Test Set: to improve the usability, we also provide a light version of EqBen by converting all the png image to the jpg using 'convert'. Feel free to download here. But please note that you may make some small revisement to the path in the annotation (change the '.png' to '.jpg'). - Sub-Test Set: we also provide a 10% subset (~25K image-text pairs) for the ease of visualization and validation. The label of the EqBen sub-set is opensource and the format follows the winoground style. But please note that the samples in the subset is randomly sorted and not be classified to each category. Please down the raw image data (URL file, ~10G) and annotation via Google Drive. --- * This is the unofficial distribution of images in the eqben benchmark. * For the official repository, please visit URL * Some part of this URL is taken from the official repository
[ "# About\n\nThis study explores the concept of equivariance in vision-language foundation models (VLMs), focusing specifically on the multimodal similarity function that is not only the major training objective but also the core delivery to support downstream tasks. Unlike the existing image-text similarity objective which only categorizes matched pairs as similar and unmatched pairs as dissimilar, equivariance also requires similarity to vary faithfully according to the semantic changes. Our key contributions are three-fold:\n\n1. A novel benchmark named EqBen (Equivariant Benchmark) to benchmark VLMs with visual-minimal change samples.\n2. A plug-and-play regularization loss EqSim (Equivariant Similarity Learning) to improve the equivariance of current VLMs.\n3. Toolkit provides an one-stop evaluation: not only for EqBen, but also for previous related benchmarks (Winoground, VALSE, etc).<br>", "# Data Download\n\n- Download images from huggingface hub: Please check the Files and versions tab above.\n\n- Full-Test Set: the user can download the EqBen raw image data (URL file, ~100G) and annotation (after randomize) (200M) via Google Drive.\n[UPDATE-2023-09] The original annotation is the annotation after randomize (non-public) for the total fairness. And the users are required to upload the results json/np file to CodaLab for getting the final results. Due to the unstability of CodaLab, we decide to public the whole original annotation. This annotation file formalized similar to *Winoground* and can be downloaded here. \n\n- Light Full-Test Set: to improve the usability, we also provide a light version of EqBen by converting all the png image to the jpg using 'convert'. Feel free to download here. But please note that you may make some small revisement to the path in the annotation (change the '.png' to '.jpg').\n\n\n- Sub-Test Set: we also provide a 10% subset (~25K image-text pairs) for the ease of visualization and validation. 
The label of the EqBen sub-set is opensource and the format follows the winoground style. But please note that the samples in the subset is randomly sorted and not be classified to each category. Please down the raw image data (URL file, ~10G) and annotation via Google Drive.\n\n\n---\n\n* This is the unofficial distribution of images in the eqben benchmark.\n* For the official repository, please visit URL\n* Some part of this URL is taken from the official repository" ]
[ "TAGS\n#license-apache-2.0 #arxiv-2303.14465 #region-us \n", "# About\n\nThis study explores the concept of equivariance in vision-language foundation models (VLMs), focusing specifically on the multimodal similarity function that is not only the major training objective but also the core delivery to support downstream tasks. Unlike the existing image-text similarity objective which only categorizes matched pairs as similar and unmatched pairs as dissimilar, equivariance also requires similarity to vary faithfully according to the semantic changes. Our key contributions are three-fold:\n\n1. A novel benchmark named EqBen (Equivariant Benchmark) to benchmark VLMs with visual-minimal change samples.\n2. A plug-and-play regularization loss EqSim (Equivariant Similarity Learning) to improve the equivariance of current VLMs.\n3. Toolkit provides an one-stop evaluation: not only for EqBen, but also for previous related benchmarks (Winoground, VALSE, etc).<br>", "# Data Download\n\n- Download images from huggingface hub: Please check the Files and versions tab above.\n\n- Full-Test Set: the user can download the EqBen raw image data (URL file, ~100G) and annotation (after randomize) (200M) via Google Drive.\n[UPDATE-2023-09] The original annotation is the annotation after randomize (non-public) for the total fairness. And the users are required to upload the results json/np file to CodaLab for getting the final results. Due to the unstability of CodaLab, we decide to public the whole original annotation. This annotation file formalized similar to *Winoground* and can be downloaded here. \n\n- Light Full-Test Set: to improve the usability, we also provide a light version of EqBen by converting all the png image to the jpg using 'convert'. Feel free to download here. 
But please note that you may make some small revisement to the path in the annotation (change the '.png' to '.jpg').\n\n\n- Sub-Test Set: we also provide a 10% subset (~25K image-text pairs) for the ease of visualization and validation. The label of the EqBen sub-set is opensource and the format follows the winoground style. But please note that the samples in the subset is randomly sorted and not be classified to each category. Please down the raw image data (URL file, ~10G) and annotation via Google Drive.\n\n\n---\n\n* This is the unofficial distribution of images in the eqben benchmark.\n* For the official repository, please visit URL\n* Some part of this URL is taken from the official repository" ]
[ 22, 213, 387 ]
[ "passage: TAGS\n#license-apache-2.0 #arxiv-2303.14465 #region-us \n# About\n\nThis study explores the concept of equivariance in vision-language foundation models (VLMs), focusing specifically on the multimodal similarity function that is not only the major training objective but also the core delivery to support downstream tasks. Unlike the existing image-text similarity objective which only categorizes matched pairs as similar and unmatched pairs as dissimilar, equivariance also requires similarity to vary faithfully according to the semantic changes. Our key contributions are three-fold:\n\n1. A novel benchmark named EqBen (Equivariant Benchmark) to benchmark VLMs with visual-minimal change samples.\n2. A plug-and-play regularization loss EqSim (Equivariant Similarity Learning) to improve the equivariance of current VLMs.\n3. Toolkit provides an one-stop evaluation: not only for EqBen, but also for previous related benchmarks (Winoground, VALSE, etc).<br>" ]
[ -0.08731333166360855, 0.02858090028166771, -0.006264930125325918, 0.0478026419878006, -0.002122933743521571, 0.038128338754177094, 0.049254655838012695, 0.12553176283836365, 0.02634814754128456, -0.06122437119483948, 0.03997171297669411, 0.008619867265224457, 0.013555650599300861, 0.07863383740186691, 0.01913893036544323, -0.23709413409233093, 0.030561475083231926, 0.1114518940448761, 0.011096160858869553, -0.019476521760225296, 0.136090949177742, -0.09063652902841568, 0.03401142731308937, 0.07047893106937408, -0.03356209397315979, 0.1012304425239563, -0.06518612802028656, 0.010343773290514946, 0.15396885573863983, 0.07706929743289948, 0.03550844267010689, 0.04246694967150688, 0.008716794662177563, -0.3177400529384613, 0.03640226647257805, 0.0272197388112545, -0.025575030595064163, -0.01798107661306858, 0.11478972434997559, -0.09733963757753372, -0.014335792511701584, 0.024187121540308, 0.044704221189022064, -0.008191735483705997, -0.04943706467747688, -0.08128479868173599, -0.003316097427159548, -0.1075899749994278, 0.03927106410264969, 0.015193268656730652, 0.015519477427005768, 0.13561893999576569, -0.05785223841667175, 0.06384915113449097, 0.22420771420001984, -0.19575925171375275, -0.008279918693006039, 0.1379338800907135, -0.0075851744040846825, -0.1557321846485138, -0.028142595663666725, 0.062094446271657944, 0.05099189281463623, 0.08911316841840744, 0.10409943759441376, -0.046395592391490936, -0.11526800692081451, 0.004995664581656456, -0.1233125701546669, -0.0013110130093991756, 0.25345543026924133, 0.01583932153880596, -0.033820491284132004, -0.04140669107437134, -0.08304104208946228, 0.1626773327589035, 0.00035525215207599103, -0.0825754776597023, 0.06300830841064453, -0.00223201559856534, 0.12358911335468292, -0.09783101081848145, -0.11966796964406967, -0.06061122193932533, -0.046646907925605774, -0.026159118860960007, 0.09471739828586578, 0.033732473850250244, -0.037237029522657394, 0.09379912912845612, -0.09730894863605499, -0.06614498049020767, 
-0.04038313031196594, -0.04504243656992912, -0.12505751848220825, 0.01143302209675312, -0.0756915956735611, -0.08826099336147308, 0.11725438386201859, 0.13505975902080536, 0.09671946614980698, 0.019188160076737404, -0.010064219124615192, 0.04846851900219917, 0.047201789915561676, 0.14914190769195557, -0.014144311659038067, -0.005283657927066088, 0.07362467795610428, 0.03147197142243385, -0.025413475930690765, -0.013448575511574745, -0.0793609470129013, -0.100715771317482, 0.11366480588912964, 0.04812780022621155, 0.03800959885120392, 0.042635977268218994, -0.08672352135181427, -0.02012433111667633, -0.0823027566075325, -0.09645197540521622, -0.045414891093969345, -0.04115002229809761, -0.03228532895445824, -0.06071725860238075, 0.04132373258471489, 0.005391267593950033, -0.11927452683448792, -0.02904784493148327, -0.005562859121710062, -0.05861472338438034, -0.0996464267373085, -0.05585648864507675, 0.04238284006714821, -0.08565878868103027, 0.08016081154346466, -0.10412546247243881, -0.23448485136032104, 0.02033137157559395, 0.0451824776828289, -0.018808769062161446, -0.0398629829287529, -0.055982980877161026, -0.01025737076997757, -0.09234368801116943, -0.009771455079317093, 0.07039664685726166, -0.08612070977687836, -0.08343250304460526, 0.07340417057275772, 0.04328376427292824, -0.014848234131932259, -0.008870195597410202, -0.05904409661889076, 0.040224164724349976, 0.07885482162237167, 0.04774404689669609, -0.09914088249206543, -0.1667906492948532, 0.014774058014154434, -0.0007298021228052676, -0.07543903589248657, 0.04917771369218826, 0.02303421124815941, 0.03465147688984871, -0.2896301746368408, -0.0004183673008810729, -0.04924362897872925, -0.11992950737476349, -0.14019715785980225, 0.11670961230993271, -0.05533028393983841, -0.07339418679475784, 0.011106924153864384, 0.22074702382087708, 0.10086842626333237, -0.09554757177829742, 0.026902588084340096, 0.0811675414443016, -0.07049544155597687, 0.05763291195034981, 0.04865516349673271, 0.0565781444311142, 
0.09585680067539215, 0.02963782660663128, -0.038702961057424545, -0.05356365442276001, -0.11853191256523132, -0.01840401254594326, -0.02405993454158306, -0.012729432433843613, -0.04166083410382271, -0.09324538707733154, -0.0022878546733409166, -0.048777658492326736, 0.0008125139866024256, -0.010307489894330502, 0.10776977986097336, -0.031775716692209244, 0.0684409812092781, -0.0777944028377533, 0.15329478681087494, -0.07873401790857315, 0.0060892049223184586, -0.10668475180864334, -0.09992387145757675, 0.08820327371358871, -0.03209675848484039, 0.022814897820353508, 0.2747547924518585, -0.06469517946243286, 0.06519079953432083, -0.021755486726760864, -0.06350268423557281, -0.10994269698858261, -0.010203134268522263, -0.004651890601962805, -0.15076272189617157, -0.010735627263784409, -0.09122923016548157, -0.13987499475479126, 0.03579729422926903, -0.02074175886809826, -0.03614778816699982, 0.1914142370223999, 0.04098711535334587, 0.07661740481853485, 0.010198294185101986, -0.012158959172666073, -0.04033421352505684, -0.014939168468117714, -0.004665904678404331, -0.026220304891467094, -0.04650818556547165, 0.16670718789100647, -0.020751142874360085, 0.26604604721069336, 0.0436089001595974, -0.15952274203300476, 0.13669759035110474, 0.008248893544077873, -0.027249718084931374, -0.02933715283870697, -0.042611971497535706, 0.004113505594432354, 0.2315867394208908, -0.0063751391135156155, 0.10825130343437195, -0.1437550038099289, -0.018798818811774254, -0.05891711264848709, -0.07150457054376602, -0.04921291023492813, 0.004481885116547346, 0.02463669888675213, -0.04458176717162132, 0.008457166142761707, -0.020843662321567535, -0.005552721209824085, 0.17470456659793854, -0.013143040239810944, -0.03334859758615494, -0.0004575207713060081, -0.06802460551261902, -0.07919184863567352, 0.21307991445064545, -0.10145336389541626, 0.06438998132944107, 0.037208594381809235, 0.10887704789638519, 0.1560440957546234, -0.12200991064310074, 0.04255937412381172, 0.048434291034936905, 
-0.06857042014598846, 0.029802346602082253, 0.04975869506597519, -0.00723463436588645, 0.06760042905807495, -0.07499305158853531, 0.012995248660445213, -0.05476969853043556, -0.09559177607297897, -0.07139920443296432, 0.19421270489692688, -0.1051102802157402, -0.20008674263954163, -0.10680671036243439, 0.07544992119073868, -0.11823758482933044, 0.08200355619192123, 0.06748265027999878, 0.05398472398519516, -0.05642058700323105, -0.0337405800819397, 0.2480274885892868, -0.07629408687353134, 0.007544951047748327, -0.013382871635258198, -0.004969804082065821, 0.0017660845769569278, -0.1862429827451706, -0.01473975833505392, -0.07028570026159286, -0.006536548957228661, 0.02961231768131256, -0.04849729686975479, 0.12893445789813995, -0.05861213058233261, -0.03844801336526871, -0.027224503457546234, -0.08312200009822845, 0.22730499505996704, 0.048748183995485306, -0.012436549179255962, 0.19211703538894653, -0.030535778030753136, 0.01007651723921299, -0.022805318236351013, -0.0541544109582901, -0.12717589735984802, -0.02073848806321621, 0.1232980340719223, 0.020945169031620026, -0.1972571164369583, -0.04618551954627037, -0.04095741733908653, 0.007732129190117121, -0.07117690145969391, 0.009095252491533756, -0.1245608776807785, 0.009834261611104012, -0.025396589189767838, 0.02329447865486145, 0.008601038716733456, 0.03850023075938225, 0.2555100917816162, -0.057681068778038025, 0.05791186913847923, 0.009437507018446922, -0.031601838767528534, 0.03249089792370796, 0.17912934720516205, 0.21194566786289215, -0.07404673844575882, -0.04399696737527847, 0.15235932171344757, -0.024981822818517685, 0.0111931674182415, 0.04528072848916054, -0.07228284329175949, 0.02546767331659794, -0.10128000378608704, 0.03173376992344856, 0.010992082767188549, 0.0306845773011446, -0.02890883758664131, 0.019472656771540642, -0.10799413174390793, 0.18137620389461517, 0.08968547731637955, 0.19321538507938385, 0.026316717267036438, -0.08931666612625122, 0.04782386124134064, 0.02707810327410698, 
-0.005169532727450132, -0.06705012172460556, 0.05612838268280029, 0.05201344937086105, -0.06843458861112595, 0.029580937698483467, 0.03555747866630554, 0.10882123559713364, -0.11807815730571747, 0.044797852635383606, -0.1772642731666565, 0.03865499794483185, 0.00014641188317909837, 0.13504965603351593, -0.2642764151096344, 0.15080268681049347, 0.028027722612023354, 0.0919579416513443, -0.057911038398742676, 0.010461737401783466, -0.010164802893996239, 0.13173837959766388, 0.040573421865701675, -0.04184025898575783, -0.07311225682497025, 0.041289061307907104, 0.018498511984944344, 0.12367109954357147, 0.03195255249738693, -0.006389417219907045, 0.050189245492219925, -0.028033684939146042, 0.030797423794865608, 0.032700859010219574, 0.10673429071903229, -0.16610927879810333, -0.07988692075014114, 0.009724391624331474, -0.049576979130506516, -0.1525949239730835, -0.12268848717212677, -0.0352911576628685, 0.07502395659685135, 0.023276684805750847, -0.08769549429416656, 0.00608467124402523, -0.04907456412911415, 0.0932939201593399, 0.11532726138830185, -0.032348763197660446, 0.024940166622400284, -0.007471437565982342, 0.20171475410461426, -0.08859958499670029, -0.11030060052871704, -0.009888287633657455, -0.14336757361888885, -0.1051953062415123, -0.06202247738838196, 0.09738318622112274, -0.07297872006893158, 0.02289704792201519, 0.061369020491838455, -0.05789830535650253, -0.0353529118001461, -0.046105142682790756, 0.05906738340854645, 0.0410546250641346, 0.02330946736037731, 0.09571835398674011, -0.08787873387336731, -0.009253133088350296, -0.029669612646102905, 0.005219489801675081, 0.04141540080308914, 0.24075908958911896, -0.017744677141308784, 0.0379345640540123, 0.09399425238370895, -0.18363825976848602, -0.21234112977981567, -0.0003763089480344206, -0.0007329323561862111, 0.04311966523528099, 0.0000631471339147538, -0.06672206521034241, 0.041558414697647095, -0.0363086573779583, -0.002669742563739419, -0.03653113171458244, -0.17733073234558105, 
-0.11657468974590302, 0.0893787145614624, 0.019192080944776535, 0.15529455244541168, 0.01841890998184681, 0.038878168910741806, -0.03621715307235718, -0.12644892930984497, -0.020998194813728333, 0.029003338888287544, 0.09354449063539505, -0.07960844784975052, -0.10559210181236267, 0.007794605102390051, 0.009580065496265888, 0.13499759137630463, 0.0019137647468596697, 0.1645500510931015, 0.04926605150103569, 0.16173726320266724, -0.024427693337202072, -0.023524731397628784, 0.09671758860349655, 0.05613664537668228, 0.13266199827194214, -0.011017213575541973, -0.09351692348718643, 0.017693815752863884, 0.11857950687408447, 0.014247833751142025, -0.08269696682691574, -0.056165967136621475, -0.023959193378686905, 0.04788593575358391, 0.004916718229651451, -0.04371311515569687, -0.07654901593923569, -0.07620963454246521, 0.034969814121723175, 0.11228030920028687, -0.22844159603118896, -0.09659651666879654, 0.01984713412821293, 0.01067426335066557, 0.07743073254823685, 0.025670088827610016, 0.06945276260375977, 0.11438383907079697, -0.09111039340496063, 0.032617125660181046, 0.10795830190181732, -0.14624588191509247, 0.01639959216117859, 0.04859011992812157, -0.0756974071264267, -0.08772656321525574, -0.02771541103720665, -0.09231612831354141, 0.07632637768983841, 0.018538212403655052, 0.12313646823167801, -0.02967430278658867, -0.0018118384759873152, -0.04283146560192108, -0.02540101483464241, 0.006375512108206749, 0.09453444182872772, 0.027098992839455605, 0.0239524245262146, -0.009743268601596355, 0.21180449426174164, -0.05675950273871422, -0.053742676973342896, 0.01623971201479435, -0.14078636467456818, -0.03143548220396042, -0.07467302680015564, 0.05526338890194893, 0.3546738624572754, -0.029481491073966026, -0.12038423120975494, -0.18119974434375763, -0.06212019920349121, 0.05793069675564766, 0.20622605085372925, 0.11662513017654419, 0.048793964087963104, -0.0015077523421496153, -0.00790609885007143, -0.01793639548122883, 0.08949287235736847, 0.086868517100811, 
0.0766177549958229, -0.13215915858745575, -0.04214078187942505, 0.013111220672726631, 0.12922817468643188, -0.03415094316005707, 0.026620203629136086, -0.060340140014886856, 0.002107159001752734, -0.13354173302650452, 0.082419753074646, 0.030569149181246758, 0.03129570186138153, 0.06797794252634048, -0.0887431725859642, -0.08089079707860947, 0.09299925714731216, -0.03729386255145073, 0.003952218219637871, -0.003396697109565139, 0.09206028282642365, -0.07547004520893097, -0.08063699305057526, 0.046635136008262634, -0.06719868630170822, 0.09600481390953064, -0.05739706754684448, -0.027700599282979965, 0.09966139495372772, -0.11946297436952591, -0.08723734319210052, 0.052900075912475586, 0.08729079365730286, -0.05056571960449219, -0.05595175921916962, 0.038224395364522934, 0.04239419475197792, -0.02807834930717945, -0.04154118895530701, 0.08071459084749222, -0.10087165981531143, -0.11719638854265213, -0.07141419500112534, -0.07267327606678009, 0.01599195785820484, -0.054313503205776215, 0.10199225693941116, 0.15923109650611877, 0.10536131262779236, -0.03705231472849846, 0.027458786964416504, -0.1508447229862213, 0.027721907943487167, 0.02400461956858635, -0.034697920083999634, 0.01819268800318241, -0.07122770696878433, 0.032061804085969925, -0.09760508686304092, 0.32605791091918945, 0.006432889960706234, 0.12803448736667633, 0.009530213661491871, 0.03795793652534485, 0.06503237783908844, 0.07110636681318283, -0.033487800508737564, -0.09553071111440659, 0.05662000551819801, -0.06007949262857437, 0.07570044696331024, 0.09424842149019241, 0.06668659299612045, -0.02809535712003708, 0.06240887939929962, 0.021361734718084335, 0.17261233925819397, 0.06769602745771408, -0.02242395281791687, 0.010621483437716961, -0.008264994248747826, 0.11119581013917923, 0.027332477271556854, 0.043133076280355453, -0.11379353702068329, 0.196552574634552, -0.11247701197862625, 0.09702501446008682, -0.014431990683078766, -0.018133409321308136, -0.09982265532016754, -0.10445848107337952, 
-0.10812434554100037, -0.05607941746711731, 0.03657116740942001, -0.190355584025383, -0.0387037992477417, 0.13731077313423157, 0.02308438904583454, -0.013340291567146778, 0.06732684373855591, 0.020424988120794296, -0.06543957442045212, -0.025153303518891335, -0.017950788140296936, -0.00591245386749506, 0.051369164139032364, -0.01994454860687256, 0.05778289586305618, 0.034348513931035995, -0.09120994061231613, -0.049408216029405594, 0.024899279698729515, -0.006360931787639856, -0.07731936126947403, 0.010615977458655834, 0.00042650700197555125, -0.08976570516824722, -0.020663009956479073, 0.11299249529838562, 0.03238977491855621, -0.0812184140086174, 0.04813533276319504, 0.33831942081451416, 0.0479435957968235, -0.1673712581396103, -0.1585710495710373, 0.02313188649713993, 0.1017325147986412, 0.11391771584749222, 0.05835521221160889, -0.02250337414443493, -0.05569387227296829, 0.08508128672838211, 0.1491890400648117, -0.07146449387073517, -0.06673972308635712, 0.008005029521882534, 0.01356426440179348, -0.0008627235074527562, 0.00797300972044468, 0.056262508034706116, 0.11901964992284775, -0.061304524540901184, -0.047288719564676285, -0.08917120844125748, 0.07328968495130539, 0.013838323764503002, 0.1885341852903366, 0.08347433805465698, 0.0261766966432333, -0.03152351826429367, -0.01944158598780632, 0.048112865537405014, -0.10518930107355118, -0.08685388416051865, -0.1003246083855629, -0.07846179604530334, -0.026589013636112213, 0.00009356194641441107, -0.0930362120270729, 0.07318908721208572, -0.10274751484394073, -0.01779770292341709, 0.00589671079069376, -0.0054200016893446445, -0.124429851770401, -0.04775585979223251, 0.04797092452645302, 0.08849944919347763, 0.1406693458557129, 0.003804172622039914, 0.13526415824890137, 0.028064750134944916, 0.05291600897908211, -0.020870603621006012, 0.08544454723596573, -0.04294724017381668, 0.053676947951316833, 0.010791940614581108, 0.034920234233140945, -0.02043444663286209, 0.07795383036136627, 0.05876598879694939, 
-0.052820660173892975, 0.059607747942209244, -0.009014475159347057, -0.12879422307014465, 0.02939741499722004, 0.10433752834796906, -0.13157035410404205, 0.02822357416152954, 0.012939664535224438, 0.02232976071536541, -0.0018656488973647356, -0.05958029627799988, 0.14186981320381165, 0.03647703677415848, 0.04946058616042137, 0.023203562945127487, -0.011073330417275429, 0.05617094784975052, -0.01651258021593094, -0.09543228894472122, -0.057988282293081284, -0.11223649978637695, 0.04106767103075981, -0.05143919214606285, 0.008204854093492031, 0.09355361759662628, 0.0518331341445446, 0.03667614981532097, -0.08899902552366257, -0.19204728305339813, 0.002500666305422783, -0.0024231348652392626, -0.06969204545021057, -0.1515447050333023 ]
4e9b59d201daa496f24416bd1d01f31f21fa39df
**French-Public Domain-Newspapers** or **French-PD-Newpapers** is a large collection aiming to agregate all the French newspapers and periodicals in the public domain. The collection has been originally compiled by Pierre-Carl Langlais, on the basis of a large corpus curated by Benoît de Courson, Benjamin Azoulay for [Gallicagram](https://shiny.ens-paris-saclay.fr/app/gallicagram) and in cooperation with OpenLLMFrance. Gallicagram is leading cultural analytics project giving access to word and ngram search on very large cultural heritage datasets in French and other languages. ## Content As of January 2024, the collection contains nearly three million unique newspaper and periodical editions (69,763,525,347 words) from the French National Library (Gallica). Each parquet file has the full text of a few thousand selected at random and, when available, a few core metadatas (Gallica id, title, author, word counts…). The metadata can be easily expanded thanks to the BNF API. This initial agregation was made possible thanks to the open data program of the French National Library and the consolidation of public domain status for cultural heritage works in the EU with the 2019 Copyright Directive (art. 14) The composition of the dataset adheres to the French criteria for public domain of collective works (any publication older than 70 years ago) and individual works (any publication with an author dead for more than 70 years). In agreement with the shorter term rules, the dataset is in the public domain everywhere. ## Uses The primary use of the collection is for cultural analytics project on a wide scale. The collection also aims to expand the availability of open works for the training of Large Language Models. The text can be used for model training and republished without restriction for reproducibility purposes. ## License The entire collection is in the public domain everywhere. 
This means that the patrimonial rights of each individual or collective rightholders have expired. The French National Library claims additional rights in its terms of use and restrict commercial use: "La réutilisation commerciale de ces contenus est payante et fait l'objet d'une licence. Est entendue par réutilisation commerciale la revente de contenus sous forme de produits élaborés ou de fourniture de service ou toute autre réutilisation des contenus générant directement des revenus." There has been a debate for years in Europe over the definition of public domain and the possibility to restrict its use. Since 2019, the EU Copyright Directive state that "Member States shall provide that, when the term of protection of a work of visual art has expired, any material resulting from an act of reproduction of that work is not subject to copyright or related rights, unless the material resulting from that act of reproduction is original in the sense that it is the author's own intellectual creation."(art. 14) ## Future developments This dataset is not a one time work but will continue to evolve significantly on two directions: * Correction of computer generated errors in the text. All the texts have been transcribed automatically through the use of Optical Character Recognition (OCR) software. The original files have been digitized over a long time period (since the mid-2000s) and some documents should be. Future versions will strive either to re-OCRize the original text or use experimental LLM models for partial OCR correction. * Enhancement of the structure/editorial presentation of the original text. Some parts of the original documents are likely unwanted for large scale analysis or model training (header, page count…). Additionally, some advanced document structures like tables or multi-column layout are unlikely to be well formatted. Major enhancements could be experted through applying new SOTA layout recognition models (like COLAF) on the original PDF files. 
* Expansion of the collection to other cultural heritage holdings, especially coming from Hathi Trust, Internet Archive and Google Books.
PleIAs/French-PD-Newspapers
[ "task_categories:text-generation", "language:fr", "ocr", "region:us" ]
2024-01-25T10:50:34+00:00
{"language": ["fr"], "task_categories": ["text-generation"], "pretty_name": "French-Public Domain-Newspapers", "tags": ["ocr"]}
2024-02-02T14:38:51+00:00
[]
[ "fr" ]
TAGS #task_categories-text-generation #language-French #ocr #region-us
French-Public Domain-Newspapers or French-PD-Newpapers is a large collection aiming to agregate all the French newspapers and periodicals in the public domain. The collection has been originally compiled by Pierre-Carl Langlais, on the basis of a large corpus curated by Benoît de Courson, Benjamin Azoulay for Gallicagram and in cooperation with OpenLLMFrance. Gallicagram is leading cultural analytics project giving access to word and ngram search on very large cultural heritage datasets in French and other languages. ## Content As of January 2024, the collection contains nearly three million unique newspaper and periodical editions (69,763,525,347 words) from the French National Library (Gallica). Each parquet file has the full text of a few thousand selected at random and, when available, a few core metadatas (Gallica id, title, author, word counts…). The metadata can be easily expanded thanks to the BNF API. This initial agregation was made possible thanks to the open data program of the French National Library and the consolidation of public domain status for cultural heritage works in the EU with the 2019 Copyright Directive (art. 14) The composition of the dataset adheres to the French criteria for public domain of collective works (any publication older than 70 years ago) and individual works (any publication with an author dead for more than 70 years). In agreement with the shorter term rules, the dataset is in the public domain everywhere. ## Uses The primary use of the collection is for cultural analytics project on a wide scale. The collection also aims to expand the availability of open works for the training of Large Language Models. The text can be used for model training and republished without restriction for reproducibility purposes. ## License The entire collection is in the public domain everywhere. This means that the patrimonial rights of each individual or collective rightholders have expired. 
The French National Library claims additional rights in its terms of use and restrict commercial use: "La réutilisation commerciale de ces contenus est payante et fait l'objet d'une licence. Est entendue par réutilisation commerciale la revente de contenus sous forme de produits élaborés ou de fourniture de service ou toute autre réutilisation des contenus générant directement des revenus." There has been a debate for years in Europe over the definition of public domain and the possibility to restrict its use. Since 2019, the EU Copyright Directive state that "Member States shall provide that, when the term of protection of a work of visual art has expired, any material resulting from an act of reproduction of that work is not subject to copyright or related rights, unless the material resulting from that act of reproduction is original in the sense that it is the author's own intellectual creation."(art. 14) ## Future developments This dataset is not a one time work but will continue to evolve significantly on two directions: * Correction of computer generated errors in the text. All the texts have been transcribed automatically through the use of Optical Character Recognition (OCR) software. The original files have been digitized over a long time period (since the mid-2000s) and some documents should be. Future versions will strive either to re-OCRize the original text or use experimental LLM models for partial OCR correction. * Enhancement of the structure/editorial presentation of the original text. Some parts of the original documents are likely unwanted for large scale analysis or model training (header, page count…). Additionally, some advanced document structures like tables or multi-column layout are unlikely to be well formatted. Major enhancements could be experted through applying new SOTA layout recognition models (like COLAF) on the original PDF files. 
* Expansion of the collection to other cultural heritage holdings, especially coming from Hathi Trust, Internet Archive and Google Books.
[ "## Content\nAs of January 2024, the collection contains nearly three million unique newspaper and periodical editions (69,763,525,347 words) from the French National Library (Gallica). Each parquet file has the full text of a few thousand selected at random and, when available, a few core metadatas (Gallica id, title, author, word counts…). The metadata can be easily expanded thanks to the BNF API.\n\nThis initial agregation was made possible thanks to the open data program of the French National Library and the consolidation of public domain status for cultural heritage works in the EU with the 2019 Copyright Directive (art. 14)\n\nThe composition of the dataset adheres to the French criteria for public domain of collective works (any publication older than 70 years ago) and individual works (any publication with an author dead for more than 70 years). In agreement with the shorter term rules, the dataset is in the public domain everywhere.", "## Uses\nThe primary use of the collection is for cultural analytics project on a wide scale.\n\nThe collection also aims to expand the availability of open works for the training of Large Language Models. The text can be used for model training and republished without restriction for reproducibility purposes.", "## License\nThe entire collection is in the public domain everywhere. This means that the patrimonial rights of each individual or collective rightholders have expired.\n\nThe French National Library claims additional rights in its terms of use and restrict commercial use: \"La réutilisation commerciale de ces contenus est payante et fait l'objet d'une licence. Est entendue par réutilisation commerciale la revente de contenus sous forme de produits élaborés ou de fourniture de service ou toute autre réutilisation des contenus générant directement des revenus.\"\n\nThere has been a debate for years in Europe over the definition of public domain and the possibility to restrict its use. 
Since 2019, the EU Copyright Directive state that \"Member States shall provide that, when the term of protection of a work of visual art has expired, any material resulting from an act of reproduction of that work is not subject to copyright or related rights, unless the material resulting from that act of reproduction is original in the sense that it is the author's own intellectual creation.\"(art. 14)", "## Future developments\nThis dataset is not a one time work but will continue to evolve significantly on two directions:\n* Correction of computer generated errors in the text. All the texts have been transcribed automatically through the use of Optical Character Recognition (OCR) software. The original files have been digitized over a long time period (since the mid-2000s) and some documents should be. Future versions will strive either to re-OCRize the original text or use experimental LLM models for partial OCR correction.\n* Enhancement of the structure/editorial presentation of the original text. Some parts of the original documents are likely unwanted for large scale analysis or model training (header, page count…). Additionally, some advanced document structures like tables or multi-column layout are unlikely to be well formatted. Major enhancements could be experted through applying new SOTA layout recognition models (like COLAF) on the original PDF files.\n* Expansion of the collection to other cultural heritage holdings, especially coming from Hathi Trust, Internet Archive and Google Books." ]
[ "TAGS\n#task_categories-text-generation #language-French #ocr #region-us \n", "## Content\nAs of January 2024, the collection contains nearly three million unique newspaper and periodical editions (69,763,525,347 words) from the French National Library (Gallica). Each parquet file has the full text of a few thousand selected at random and, when available, a few core metadatas (Gallica id, title, author, word counts…). The metadata can be easily expanded thanks to the BNF API.\n\nThis initial agregation was made possible thanks to the open data program of the French National Library and the consolidation of public domain status for cultural heritage works in the EU with the 2019 Copyright Directive (art. 14)\n\nThe composition of the dataset adheres to the French criteria for public domain of collective works (any publication older than 70 years ago) and individual works (any publication with an author dead for more than 70 years). In agreement with the shorter term rules, the dataset is in the public domain everywhere.", "## Uses\nThe primary use of the collection is for cultural analytics project on a wide scale.\n\nThe collection also aims to expand the availability of open works for the training of Large Language Models. The text can be used for model training and republished without restriction for reproducibility purposes.", "## License\nThe entire collection is in the public domain everywhere. This means that the patrimonial rights of each individual or collective rightholders have expired.\n\nThe French National Library claims additional rights in its terms of use and restrict commercial use: \"La réutilisation commerciale de ces contenus est payante et fait l'objet d'une licence. 
Est entendue par réutilisation commerciale la revente de contenus sous forme de produits élaborés ou de fourniture de service ou toute autre réutilisation des contenus générant directement des revenus.\"\n\nThere has been a debate for years in Europe over the definition of public domain and the possibility to restrict its use. Since 2019, the EU Copyright Directive state that \"Member States shall provide that, when the term of protection of a work of visual art has expired, any material resulting from an act of reproduction of that work is not subject to copyright or related rights, unless the material resulting from that act of reproduction is original in the sense that it is the author's own intellectual creation.\"(art. 14)", "## Future developments\nThis dataset is not a one time work but will continue to evolve significantly on two directions:\n* Correction of computer generated errors in the text. All the texts have been transcribed automatically through the use of Optical Character Recognition (OCR) software. The original files have been digitized over a long time period (since the mid-2000s) and some documents should be. Future versions will strive either to re-OCRize the original text or use experimental LLM models for partial OCR correction.\n* Enhancement of the structure/editorial presentation of the original text. Some parts of the original documents are likely unwanted for large scale analysis or model training (header, page count…). Additionally, some advanced document structures like tables or multi-column layout are unlikely to be well formatted. Major enhancements could be experted through applying new SOTA layout recognition models (like COLAF) on the original PDF files.\n* Expansion of the collection to other cultural heritage holdings, especially coming from Hathi Trust, Internet Archive and Google Books." ]
[ 26, 205, 64, 224, 249 ]
[ "passage: TAGS\n#task_categories-text-generation #language-French #ocr #region-us \n## Content\nAs of January 2024, the collection contains nearly three million unique newspaper and periodical editions (69,763,525,347 words) from the French National Library (Gallica). Each parquet file has the full text of a few thousand selected at random and, when available, a few core metadatas (Gallica id, title, author, word counts…). The metadata can be easily expanded thanks to the BNF API.\n\nThis initial agregation was made possible thanks to the open data program of the French National Library and the consolidation of public domain status for cultural heritage works in the EU with the 2019 Copyright Directive (art. 14)\n\nThe composition of the dataset adheres to the French criteria for public domain of collective works (any publication older than 70 years ago) and individual works (any publication with an author dead for more than 70 years). In agreement with the shorter term rules, the dataset is in the public domain everywhere.## Uses\nThe primary use of the collection is for cultural analytics project on a wide scale.\n\nThe collection also aims to expand the availability of open works for the training of Large Language Models. The text can be used for model training and republished without restriction for reproducibility purposes." ]
[ -0.03662563115358353, 0.10014598071575165, -0.001755271921865642, 0.0323171392083168, 0.050696469843387604, 0.01613321714103222, 0.11526456475257874, 0.02833395265042782, -0.012415318749845028, 0.01448679156601429, 0.02000337652862072, -0.1403912901878357, -0.055832602083683014, 0.026444530114531517, -0.00946011021733284, -0.19113659858703613, 0.03747421130537987, -0.024312935769557953, -0.09501294791698456, -0.0038354946300387383, 0.1230681762099266, -0.02167806774377823, 0.044289425015449524, 0.004388999193906784, 0.0022751030046492815, 0.03267707675695419, -0.010736758820712566, -0.10956364870071411, 0.07209015637636185, 0.07507313787937164, 0.016025634482502937, 0.054113730788230896, 0.05508742108941078, -0.010331662371754646, 0.005816741846501827, 0.020589502528309822, -0.06034216657280922, 0.0027733100578188896, 0.06640801578760147, -0.045801665633916855, 0.1398485004901886, -0.03825678303837776, -0.06258056312799454, 0.05716412886977196, -0.05268638953566551, -0.06515459716320038, -0.06245056912302971, -0.11457619071006775, -0.12864366173744202, 0.11684561520814896, -0.0023300156462937593, 0.02043101191520691, -0.1622350513935089, -0.04662749171257019, 0.026905689388513565, -0.20256595313549042, -0.012813573703169823, 0.13332206010818481, 0.05622503161430359, 0.20502866804599762, -0.08033958077430725, 0.07896868139505386, 0.02463454194366932, 0.04615085572004318, 0.03034256584942341, -0.0737520232796669, -0.07868450880050659, -0.030993804335594177, -0.02695181593298912, -0.04044107347726822, 0.3692326247692108, -0.0017248475924134254, -0.05353740602731705, -0.044128336012363434, -0.05386102572083473, 0.10792098939418793, -0.06462280452251434, 0.06646924465894699, 0.1279536634683609, 0.061004314571619034, 0.0361868217587471, -0.11632522940635681, -0.09277335554361343, -0.02588227018713951, -0.17489781975746155, 0.06327027827501297, -0.03595012426376343, 0.04459422454237938, -0.07195206731557846, -0.011220506392419338, -0.21124309301376343, 
-0.011103274300694466, -0.11393129080533981, -0.05507635697722435, -0.005088413134217262, -0.004094012547284365, -0.08415177464485168, 0.02311241440474987, 0.03125851973891258, 0.06441239267587662, -0.12690404057502747, -0.06762513518333435, -0.09786100685596466, 0.1459020972251892, 0.10426157712936401, 0.0011506052687764168, -0.10163044929504395, -0.07245837897062302, 0.09693444520235062, -0.05856725201010704, -0.009536521509289742, 0.03223958611488342, -0.030908800661563873, -0.05988185852766037, -0.0724819079041481, -0.006986595224589109, -0.030117232352495193, -0.055627353489398956, 0.02016657218337059, -0.006713213864713907, 0.04383072629570961, -0.06360478699207306, -0.039828285574913025, 0.01111543457955122, -0.0816163420677185, 0.07038157433271408, -0.03269242122769356, -0.0593523345887661, -0.049342211335897446, 0.0233016274869442, -0.10198667645454407, -0.04801856353878975, -0.0608704648911953, -0.10937093198299408, 0.02764124609529972, -0.011159125715494156, -0.039342593401670456, -0.10308504849672318, -0.11544269323348999, -0.05324678495526314, 0.034526679664850235, -0.06591826677322388, 0.009892039000988007, -0.012425463646650314, -0.04368254169821739, -0.04935922473669052, 0.0005981745198369026, -0.030265042558312416, -0.10088794678449631, 0.011965719982981682, -0.14810259640216827, 0.0543576180934906, -0.15816640853881836, 0.0323527529835701, -0.146910160779953, -0.01485762931406498, -0.15538161993026733, 0.05265877768397331, -0.01960953138768673, -0.01639063097536564, 0.0027779191732406616, -0.06083988770842552, -0.09580782800912857, 0.03127361834049225, -0.007394017186015844, 0.08600076287984848, -0.177940234541893, -0.07188539952039719, 0.1928299516439438, -0.09081970155239105, -0.052640292793512344, 0.1894216686487198, -0.026676883921027184, 0.10485076904296875, 0.07811570167541504, 0.10337278991937637, 0.06381125003099442, 0.03729114681482315, -0.08556200563907623, 0.02609953097999096, -0.03230801224708557, -0.1093427762389183, 
0.07263772189617157, -0.05109501630067825, 0.058554649353027344, 0.004986723884940147, 0.01634962111711502, -0.014651959761977196, -0.0055940039455890656, -0.004734158515930176, -0.023944979533553123, -0.015844685956835747, 0.05423150956630707, 0.020567744970321655, 0.07594405859708786, -0.05440433323383331, 0.012532421387732029, 0.06230950728058815, 0.04788437485694885, -0.03571285307407379, 0.0042151641100645065, 0.005989416502416134, 0.13339021801948547, 0.002086170483380556, -0.0017727764789015055, -0.12905479967594147, 0.022209808230400085, 0.01492434460669756, 0.080988809466362, 0.0883212462067604, 0.18927831947803497, 0.08747900277376175, 0.03172067180275917, -0.0972457230091095, 0.1091541275382042, -0.10638273507356644, -0.08243028074502945, -0.009199651889503002, -0.013582803308963776, 0.10111421346664429, -0.03536808863282204, 0.151875302195549, -0.13351070880889893, -0.006738394033163786, 0.00512748071923852, 0.008428961038589478, -0.01129171997308731, 0.0426393486559391, -0.03420325368642807, 0.03061806410551071, -0.1179460883140564, 0.009036460891366005, 0.021499821916222572, -0.017395013943314552, 0.04314367473125458, 0.0923384577035904, -0.03830915316939354, 0.060408636927604675, 0.14180877804756165, -0.030689626932144165, 0.0109634418040514, 0.024866608902812004, -0.031785864382982254, 0.08514873683452606, -0.12095852196216583, -0.040878139436244965, 0.09946271032094955, 0.004947013221681118, 0.11988351494073868, -0.16411329805850983, 0.029795004054903984, 0.07269646227359772, -0.011567806825041771, -0.018747679889202118, 0.03630648925900459, 0.1338403820991516, -0.1031917929649353, 0.12754696607589722, 0.08403610438108444, -0.07614711672067642, 0.2818496823310852, 0.02957615815103054, -0.08734820038080215, 0.079615518450737, 0.022614460438489914, 0.01635662093758583, 0.1290864795446396, -0.04373802989721298, 0.045553628355264664, 0.021830828860402107, 0.09990023076534271, 0.03968873620033264, -0.01795959658920765, 0.01966286264359951, 
-0.040878914296627045, -0.07116471976041794, -0.04290664941072464, -0.044791799038648605, 0.04002390056848526, 0.16181433200836182, 0.023391608148813248, -0.10266482830047607, -0.012276318855583668, -0.015618771314620972, -0.001341900322586298, 0.1528710275888443, -0.1026407778263092, -0.12821504473686218, -0.0799311175942421, 0.16682428121566772, -0.19589795172214508, 0.045249924063682556, 0.01262480765581131, -0.04507555067539215, -0.025621408596634865, -0.11606808751821518, -0.01840638741850853, -0.10645148903131485, -0.10715392976999283, -0.09157217293977737, 0.14257220923900604, -0.08676068484783173, -0.15774808824062347, -0.021130535751581192, -0.08147643506526947, -0.10566700994968414, 0.03762092441320419, -0.0405583456158638, 0.05300229787826538, 0.011373437941074371, -0.08858617395162582, -0.06878355145454407, -0.012166094966232777, 0.07706237584352493, -0.06894365698099136, 0.04652397707104683, 0.08769161999225616, 0.13720828294754028, 0.0013637475203722715, 0.09602460265159607, 0.021033991128206253, -0.0744156613945961, -0.021228574216365814, 0.07591408491134644, -0.09824448823928833, -0.13525037467479706, -0.1874835193157196, -0.16177372634410858, 0.02574283257126808, 0.04805959016084671, 0.024353167042136192, 0.031765516847372055, 0.06749077141284943, -0.07287335395812988, -0.005371231120079756, 0.03713748976588249, 0.01049150712788105, 0.1734691560268402, 0.028274692595005035, 0.0903896614909172, -0.08620549738407135, -0.00518433190882206, 0.1791236251592636, -0.02824249304831028, 0.3072024881839752, -0.0547078512609005, 0.14698387682437897, 0.03532532975077629, 0.025166455656290054, 0.07397818565368652, 0.1737518608570099, -0.012484877370297909, -0.002249050885438919, -0.07246921211481094, -0.025319769978523254, 0.015712102875113487, 0.05954934284090996, 0.001827647676691413, -0.14101146161556244, -0.039795804768800735, -0.19262196123600006, 0.03472728654742241, 0.09203017503023148, 0.08508185297250748, -0.127155140042305, -0.010046909563243389, 
0.0463249571621418, 0.038144588470458984, -0.12211629003286362, 0.10545586049556732, 0.2683602571487427, -0.082138791680336, 0.050495944917201996, 0.06168600544333458, 0.10993489623069763, -0.0040374817326664925, 0.008657331578433514, -0.11744295805692673, -0.02126799337565899, -0.023653846234083176, 0.06733476370573044, -0.08271404355764389, 0.20550896227359772, 0.020254064351320267, 0.008672387339174747, -0.08010679483413696, -0.03246297687292099, 0.05320519581437111, 0.08576823770999908, 0.11703471094369888, 0.028734086081385612, -0.029566684737801552, 0.07339171320199966, 0.006914972327649593, 0.03290492296218872, -0.021870329976081848, -0.04221037030220032, 0.030518436804413795, 0.0074589489959180355, 0.004063305910676718, -0.026604734361171722, -0.05900198593735695, -0.13162478804588318, -0.17748422920703888, -0.039514899253845215, 0.13674263656139374, -0.01905776932835579, 0.01077148225158453, -0.02772313356399536, 0.006652849726378918, 0.18504555523395538, 0.05559113994240761, -0.03308258578181267, -0.08096397668123245, -0.0834866464138031, 0.1224636435508728, 0.019887173548340797, 0.026825672015547752, -0.007681007497012615, 0.06767494231462479, -0.10219328850507736, -0.1056319922208786, 0.01586746796965599, -0.1005018949508667, -0.03312491253018379, 0.003782746847718954, 0.05086332932114601, 0.03878359869122505, 0.031478401273489, 0.030881086364388466, 0.06533026695251465, -0.0034178204368799925, -0.13010592758655548, -0.0988224595785141, -0.02146710641682148, 0.10851065069437027, 0.027804501354694366, -0.19223007559776306, -0.09757614880800247, 0.06489352881908417, -0.005881303921341896, 0.16113147139549255, 0.2476428747177124, -0.05461636930704117, 0.11358575522899628, 0.21683700382709503, -0.0685117244720459, -0.25742554664611816, 0.04394976794719696, -0.06130481883883476, 0.00906451977789402, -0.0014186367625370622, -0.18568342924118042, 0.029578758403658867, 0.1658473163843155, 0.005793662276118994, 0.05506270006299019, -0.19897525012493134, 
-0.06899397820234299, 0.0013020929181948304, -0.05186589062213898, 0.4177429974079132, 0.012813602574169636, 0.06741154193878174, -0.089607834815979, 0.03530105575919151, 0.1495949625968933, -0.15426050126552582, 0.09708954393863678, -0.02726287767291069, -0.045777253806591034, -0.007531937677413225, -0.035591207444667816, 0.09661629796028137, 0.0199459046125412, 0.02285270392894745, -0.03347105160355568, 0.1128385066986084, 0.29346445202827454, 0.015658747404813766, 0.06700695306062698, -0.07499659061431885, -0.03297700732946396, -0.1309899091720581, -0.042890891432762146, -0.0664902925491333, 0.11492930352687836, -0.03125084564089775, -0.046776607632637024, -0.07452719658613205, 0.07735767960548401, -0.03957591578364372, -0.016669852659106255, -0.005051834508776665, -0.04709089174866676, 0.05041388049721718, 0.10734878480434418, 0.1876652091741562, -0.12737922370433807, -0.03548390418291092, 0.1362265646457672, 0.034398142248392105, 0.05398663133382797, -0.05141009762883186, -0.0005138426786288619, 0.10409867018461227, -0.02016879804432392, 0.0249930452555418, 0.060487836599349976, -0.03279531002044678, 0.050469979643821716, 0.0846996158361435, -0.01835155114531517, -0.1875256448984146, 0.027306562289595604, -0.04291965812444687, -0.028189461678266525, -0.06028302386403084, 0.12569712102413177, -0.0510660856962204, 0.002892971271649003, -0.04559930041432381, 0.01577402465045452, -0.02366882562637329, 0.03272857144474983, -0.045405350625514984, -0.015620396472513676, -0.08273233473300934, 0.047699667513370514, 0.08411802351474762, -0.1449592411518097, -0.03381695598363876, 0.12063343077898026, -0.08651728928089142, -0.07207958400249481, 0.04730904847383499, 0.010878315195441246, -0.08995187282562256, -0.0415336899459362, -0.03870045766234398, -0.07609014958143234, -0.013583815656602383, 0.08442902565002441, 0.03197323530912399, -0.012692373245954514, -0.011689259670674801, -0.08456713706254959, -0.07473456859588623, -0.012850281782448292, -0.04478108137845993, 
-0.01107152458280325, 0.07062862068414688, -0.07097568362951279, -0.0007098512724041939, -0.01691705361008644, -0.03302782401442528, -0.035022951662540436, -0.06908383965492249, -0.014933993108570576, -0.12581287324428558, -0.026260897517204285, -0.07404487580060959, -0.025471311062574387, -0.07005619257688522, 0.05516115948557854, -0.06997362524271011, -0.06274893879890442, -0.07531575113534927, -0.003952348604798317, -0.01748872734606266, 0.09978107362985611, -0.0836872085928917, 0.041997116059064865, 0.037410471588373184, -0.039296697825193405, 0.10886755585670471, 0.045779500156641006, -0.05751984193921089, -0.017699800431728363, -0.04168432578444481, -0.0981861874461174, -0.004241828806698322, 0.061323005706071854, 0.03713527321815491, 0.0426580049097538, 0.08611124753952026, 0.08791247010231018, 0.06997436285018921, 0.05232689902186394, -0.1000651940703392, -0.05294257402420044, 0.05290832370519638, 0.019078023731708527, -0.09014523029327393, -0.03288126364350319, 0.01596549153327942, 0.07796371728181839, 0.058794211596250534, 0.060119736939668655, -0.027826251462101936, -0.04110157489776611, -0.05434483289718628, -0.013655138202011585, -0.028115754947066307, -0.06866573542356491, 0.04637056216597557, -0.04685211554169655, 0.022478962317109108, 0.0588209293782711, 0.27034837007522583, 0.0929461270570755, -0.013309575617313385, 0.055275145918130875, -0.0021064546890556812, -0.067413330078125, 0.012475287541747093, 0.03965315967798233, 0.10817109793424606, -0.02405080758035183, -0.06296690553426743, 0.05994105711579323, 0.06836576759815216, 0.040887147188186646, 0.20775200426578522, 0.15568339824676514, 0.07058605551719666, 0.026543177664279938, -0.04220697283744812, -0.12573418021202087, -0.002131104003638029, 0.0894906222820282, 0.0993482694029808, 0.07500146329402924, -0.12962351739406586, -0.05547706037759781, 0.1480381190776825, -0.05263650417327881, 0.15064650774002075, -0.029229866340756416, -0.018305590376257896, -0.10513028502464294, 
0.048480719327926636, -0.035971421748399734, -0.09434176981449127, -0.002923473948612809, -0.08131211251020432, 0.05378621444106102, 0.13177165389060974, 0.0894087627530098, -0.03927919268608093, -0.07787550240755081, -0.13858206570148468, -0.12331733852624893, -0.0038239038549363613, -0.028130775317549706, 0.10175985842943192, -0.057480279356241226, -0.01616821065545082, -0.009220825508236885, -0.01687769964337349, 0.04599752277135849, 0.135603666305542, 0.08477894216775894, 0.03313075751066208, -0.08592642843723297, -0.017057327553629875, -0.05622235685586929, 0.030451031401753426, 0.029095519334077835, 0.13292135298252106, 0.025017790496349335, -0.11839164793491364, 0.06706353276968002, 0.19873866438865662, 0.009676805697381496, -0.0971803069114685, -0.08580828458070755, 0.1174546331167221, 0.04755892604589462, 0.03355878219008446, -0.02666681632399559, -0.0591704435646534, -0.03545018658041954, 0.20750455558300018, 0.2703055441379547, -0.012341096997261047, -0.02731715887784958, -0.013978850096464157, -0.022043786942958832, 0.07649306952953339, 0.04343154653906822, -0.002199118956923485, 0.44118210673332214, -0.08294472098350525, 0.03296083211898804, -0.013998234644532204, 0.10047082602977753, -0.13065806031227112, 0.16918201744556427, -0.058567751199007034, -0.019098348915576935, -0.028419099748134613, 0.058097369968891144, -0.0002912173804361373, 0.021314574405550957, 0.05539262294769287, -0.10980179160833359, -0.049267951399087906, -0.0583864264190197, -0.0748056173324585, -0.008243286050856113, 0.06767228245735168, -0.015605761669576168, -0.06639692932367325, -0.023658672347664833, -0.021908635273575783, -0.12641949951648712, -0.1275922656059265, 0.06889389455318451, 0.0805976614356041, 0.1985899806022644, -0.0309076439589262, -0.023427778854966164, 0.06380828469991684, -0.009085352532565594, -0.10550051927566528, 0.0067675551399588585, -0.0021678584162145853, 0.01742280274629593, 0.013222530484199524, 0.024207795038819313, -0.11811429262161255, 
0.03903264179825783, 0.03248731419444084, -0.02825326845049858, 0.06788482517004013, -0.028845706954598427, -0.007950820028781891, -0.07389649003744125, 0.04326621815562248, -0.10320629179477692, 0.08892827481031418, 0.06261046975851059, 0.017046891152858734, -0.015132172964513302, -0.07319582998752594, 0.029519684612751007, 0.025922393426299095, -0.0806310772895813, -0.025950999930500984, -0.13241691887378693, -0.03654611483216286, -0.01002141647040844, -0.03136972710490227, -0.3309292197227478, 0.05568819120526314, 0.028956232592463493, 0.06848054379224777, -0.02597004547715187, -0.020164979621767998, 0.06377941370010376, -0.031362567096948624, -0.042904600501060486, -0.039369095116853714, 0.002247384749352932, 0.005304284393787384, 0.000027466217943583615, -0.0561366006731987 ]
b0edd46eb1a0e2d361254e31fc01355e317e475a
# CITYLID: A large-scale categorized aerial Lidar dataset for street-level research <!-- Provide a quick summary of the dataset. --> This repository is dedicated to providing categorized aerial Lidar datasets along with the methodology for data preparation. Details regarding data preparation and usage are given in the [GitHub Repository](https://github.com/deepankverma/navigating_streetscapes) ### Dataset Description The dataset covers the entire state of Berlin and is divided into 1060 tiles of 1 sq. km each. The tiles are further grouped under [9 regions](https://fbinter.stadt-berlin.de/fb/atom/DOP/Blattschnitt2x2km.gif). The dataset comprises (a) [Categorized Point clouds](Lidar_point_clouds) and (b) [Raster image files providing solar radiation maps](solar_radiation_rasters). The details regarding the data preparation can be found in [GitHub Repository](https://github.com/deepankverma/navigating_streetscapes). ## Citation <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> [Verma, D., Mumm, O., & Carlow, V. M. (2023). Generating citywide street cross-sections using aerial LiDAR and detailed street plan. Sustainable Cities and Society, 96, 104673](https://www.sciencedirect.com/science/article/pii/S2210670723002846)
Deepank/CITYLID
[ "language:en", "language:de", "license:mit", "code", "aerial point-cloud", "point-cloud classification", "urban streetscapes", "cross-sections", "region:us" ]
2024-01-25T10:58:19+00:00
{"language": ["en", "de"], "license": "mit", "pretty_name": "CTLID", "tags": ["code", "aerial point-cloud", "point-cloud classification", "urban streetscapes", "cross-sections"]}
2024-02-01T10:53:48+00:00
[]
[ "en", "de" ]
TAGS #language-English #language-German #license-mit #code #aerial point-cloud #point-cloud classification #urban streetscapes #cross-sections #region-us
# CITYLID: A large-scale categorized aerial Lidar dataset for street-level research This repository is dedicated to providing categorized aerial Lidar datasets along with the methodology for data preparation. Details regarding data preparation and usage are given in the GitHub Repository ### Dataset Description The dataset covers the entire state of Berlin and is divided into 1060 tiles of 1 sq. km each. The tiles are further grouped under 9 regions. The dataset comprises (a) Categorized Point clouds and (b) Raster image files providing solar radiation maps. The details regarding the data preparation can be found in GitHub Repository. Verma, D., Mumm, O., & Carlow, V. M. (2023). Generating citywide street cross-sections using aerial LiDAR and detailed street plan. Sustainable Cities and Society, 96, 104673
[ "# CITYLID: A large-scale categorized aerial Lidar dataset for street-level research\n\n\n\nThis repository is dedicated to providing categorized aerial Lidar datasets along with the methodology for data preparation. \nDetails regarding data preparation and usage are given in the GitHub Repository", "### Dataset Description\n\nThe dataset covers the entire state of Berlin and is divided into 1060 tiles of 1 sq. km each. The tiles are further grouped under\n9 regions. The dataset comprises (a) Categorized Point clouds \nand (b) Raster image files providing solar radiation maps. The details regarding the \ndata preparation can be found in GitHub Repository. \n\n\n\nVerma, D., Mumm, O., & Carlow, V. M. (2023). Generating citywide street cross-sections using aerial LiDAR and detailed street plan. Sustainable Cities and Society, 96, 104673" ]
[ "TAGS\n#language-English #language-German #license-mit #code #aerial point-cloud #point-cloud classification #urban streetscapes #cross-sections #region-us \n", "# CITYLID: A large-scale categorized aerial Lidar dataset for street-level research\n\n\n\nThis repository is dedicated to providing categorized aerial Lidar datasets along with the methodology for data preparation. \nDetails regarding data preparation and usage are given in the GitHub Repository", "### Dataset Description\n\nThe dataset covers the entire state of Berlin and is divided into 1060 tiles of 1 sq. km each. The tiles are further grouped under\n9 regions. The dataset comprises (a) Categorized Point clouds \nand (b) Raster image files providing solar radiation maps. The details regarding the \ndata preparation can be found in GitHub Repository. \n\n\n\nVerma, D., Mumm, O., & Carlow, V. M. (2023). Generating citywide street cross-sections using aerial LiDAR and detailed street plan. Sustainable Cities and Society, 96, 104673" ]
[ 45, 68, 146 ]
[ "passage: TAGS\n#language-English #language-German #license-mit #code #aerial point-cloud #point-cloud classification #urban streetscapes #cross-sections #region-us \n# CITYLID: A large-scale categorized aerial Lidar dataset for street-level research\n\n\n\nThis repository is dedicated to providing categorized aerial Lidar datasets along with the methodology for data preparation. \nDetails regarding data preparation and usage are given in the GitHub Repository### Dataset Description\n\nThe dataset covers the entire state of Berlin and is divided into 1060 tiles of 1 sq. km each. The tiles are further grouped under\n9 regions. The dataset comprises (a) Categorized Point clouds \nand (b) Raster image files providing solar radiation maps. The details regarding the \ndata preparation can be found in GitHub Repository. \n\n\n\nVerma, D., Mumm, O., & Carlow, V. M. (2023). Generating citywide street cross-sections using aerial LiDAR and detailed street plan. Sustainable Cities and Society, 96, 104673" ]
[ -0.1486050933599472, 0.2111404538154602, -0.002994379261508584, 0.019000938162207603, 0.08876672387123108, -0.0010087122209370136, 0.16636404395103455, -0.0020186544861644506, 0.05540071427822113, 0.048903681337833405, -0.048441331833601, -0.033989354968070984, 0.05577190965414047, 0.14280582964420319, -0.028450701385736465, -0.2989502549171448, -0.025015057995915413, -0.07014487683773041, -0.15391357243061066, 0.021112166345119476, 0.1273200809955597, -0.05946502834558487, 0.024324480444192886, 0.0244552381336689, -0.11455456912517548, 0.08540338277816772, -0.018482903018593788, -0.05410119518637657, 0.11661168187856674, 0.048035748302936554, 0.13575756549835205, 0.04786287248134613, 0.11954017728567123, -0.017386801540851593, -0.015357761643826962, 0.034482717514038086, -0.13152718544006348, 0.00978804100304842, 0.1874649077653885, 0.004660941660404205, 0.16197220981121063, -0.09389340132474899, -0.05033031478524208, -0.00312258442863822, -0.013097423128783703, -0.1921391487121582, -0.09165976941585541, 0.03916317597031593, 0.015684885904192924, -0.020239267498254776, 0.03842185065150261, -0.017434366047382355, -0.08155139535665512, 0.06617273390293121, 0.012704347260296345, -0.1708461344242096, -0.03304252028465271, 0.19177985191345215, -0.07616672664880753, 0.14199718832969666, -0.08955368399620056, 0.05487601459026337, -0.04282481595873833, 0.02268061973154545, 0.01798323169350624, -0.013361512683331966, 0.03808749094605446, 0.0273321270942688, -0.15412917733192444, -0.054853327572345734, 0.28425082564353943, -0.043366920202970505, 0.0013491592835634947, -0.005968568846583366, -0.11049717664718628, 0.15762613713741302, 0.037392955273389816, 0.020186742767691612, -0.011863341554999352, 0.01082324143499136, -0.03978829085826874, -0.008722156286239624, -0.07583503425121307, 0.04399380832910538, -0.18629862368106842, 0.27161261439323425, 0.011251652613282204, 0.035610996186733246, -0.025354189798235893, 0.1321357786655426, 0.004545559175312519, 
-0.1152520626783371, -0.021850870922207832, 0.009748349897563457, -0.07090156525373459, 0.04845879226922989, -0.06196068599820137, -0.022492241114377975, 0.04501720145344734, 0.05256784334778786, 0.07509763538837433, -0.031247099861502647, 0.02173195406794548, 0.04331416264176369, 0.10533442348241806, 0.10862763971090317, -0.0062171719036996365, -0.11036360263824463, 0.03649871051311493, -0.155294731259346, 0.00572434114292264, -0.041498854756355286, -0.10559700429439545, -0.030900875106453896, -0.03835739195346832, 0.1119799092411995, 0.014691842719912529, -0.023434091359376907, 0.02976430393755436, -0.039020881056785583, 0.018783345818519592, -0.13755103945732117, 0.029048031195998192, -0.0360361747443676, 0.03384161368012428, -0.05063646286725998, -0.08188220113515854, -0.040736522525548935, -0.026218470185995102, 0.10470131784677505, -0.015347677282989025, -0.01724456436932087, -0.03203278407454491, -0.016236089169979095, -0.021574636921286583, -0.1670009344816208, -0.031680259853601456, -0.1400231122970581, -0.04825796186923981, -0.019886527210474014, -0.01567833684384823, 0.0009749172022566199, 0.055318329483270645, -0.0019147760467603803, 0.0269862599670887, 0.014086117036640644, -0.031054619699716568, -0.05632886290550232, -0.011976976878941059, 0.02845754846930504, -0.08381667733192444, -0.007913583889603615, -0.2016512006521225, 0.0284210667014122, -0.015835922211408615, 0.019646087661385536, 0.003019320545718074, 0.02105591632425785, -0.09611060470342636, -0.0366031639277935, -0.04858139157295227, 0.02982020378112793, 0.060793686658144, -0.03231130167841911, 0.019424546509981155, 0.07724788039922714, -0.25169089436531067, -0.008381006307899952, 0.009902382269501686, -0.12793025374412537, 0.050810664892196655, 0.09196623414754868, -0.059047631919384, -0.00965836364775896, 0.0344187393784523, 0.11711329966783524, 0.06067812070250511, -0.011305401101708412, -0.10090577602386475, 0.08721484243869781, -0.01042101252824068, -0.05502421408891678, 
0.09643379598855972, -0.012948812916874886, -0.11021887511014938, 0.01506456546485424, -0.12617653608322144, 0.024400802329182625, 0.0026354745496064425, -0.030654532834887505, -0.05271921306848526, 0.013775267638266087, 0.05896764248609543, 0.041488926857709885, 0.03736903518438339, -0.0035070094745606184, -0.022931760177016258, -0.01862478256225586, 0.0888458788394928, -0.03864287957549095, 0.016606148332357407, -0.009989432990550995, 0.06371065229177475, -0.054728034883737564, -0.01856003701686859, -0.10885435342788696, -0.12114530801773071, 0.05951365455985069, 0.029910173267126083, 0.008588751778006554, 0.15982729196548462, 0.008561824448406696, 0.04017706960439682, -0.09343336522579193, 0.0034320522099733353, -0.06965135037899017, 0.014071657322347164, -0.037511397153139114, -0.0706905946135521, -0.07877015322446823, -0.05590188130736351, -0.11537156999111176, -0.08443139493465424, -0.011855045333504677, 0.2662329077720642, 0.13352298736572266, -0.033662233501672745, -0.0019505195086821914, -0.0534522645175457, -0.0169377438724041, -0.11823246628046036, -0.012819269672036171, -0.02077942155301571, -0.0181178729981184, -0.06461294740438461, 0.008550700731575489, 0.06280755996704102, 0.07499182969331741, 0.015880145132541656, 0.11683208495378494, 0.042384494096040726, -0.05263667181134224, -0.04667096212506294, 0.027943015098571777, -0.1608324944972992, -0.09244832396507263, 0.050642091780900955, -0.06794150173664093, 0.008732740767300129, -0.04921707510948181, -0.010891970247030258, -0.006549728102982044, 0.03775809705257416, -0.11481870710849762, 0.05791627988219261, 0.002260117093101144, -0.09750016778707504, 0.12031348794698715, 0.16578388214111328, 0.04886215180158615, 0.12402888387441635, -0.01697472669184208, 0.0033345771953463554, -0.03568902611732483, 0.021830828860402107, 0.01250965241342783, 0.10605461895465851, -0.08304215967655182, 0.009397124871611595, 0.033240821212530136, 0.05654409900307655, 0.019297447055578232, -0.09974431991577148, 
0.019395818933844566, -0.03507145121693611, 0.046615567058324814, -0.021341202780604362, 0.00557026918977499, -0.017876992002129555, 0.0714545026421547, -0.05412355065345764, -0.103439562022686, 0.05428758263587952, -0.06931166350841522, -0.019167229533195496, 0.13048683106899261, -0.112474225461483, -0.22366927564144135, -0.1102614477276802, -0.029759960249066353, -0.14142964780330658, 0.036863867193460464, -0.00265816249884665, -0.05043823644518852, -0.05304592475295067, -0.035932622849941254, 0.18853944540023804, -0.14700376987457275, 0.061686791479587555, -0.11435090750455856, 0.06310250610113144, -0.13665898144245148, -0.12813830375671387, 0.04199552908539772, -0.01418871060013771, -0.03331933915615082, 0.04656105488538742, -0.02331855148077011, 0.13977831602096558, 0.07342978566884995, -0.01714874431490898, 0.06427152454853058, -0.0323435440659523, 0.1738692969083786, -0.10302531719207764, 0.08377517759799957, -0.00467812130227685, -0.02519119530916214, 0.053514715284109116, 0.14092427492141724, 0.08449389040470123, -0.10122711956501007, -0.05975964292883873, -0.05336207523941994, -0.12181656062602997, -0.12975655496120453, -0.15569718182086945, -0.14087596535682678, -0.08550291508436203, -0.02173682115972042, 0.09383196383714676, -0.015601329505443573, 0.05317142978310585, -0.0277491956949234, 0.015651149675250053, -0.08303865790367126, 0.006805709563195705, 0.08124225586652756, 0.039407890290021896, 0.01264998409897089, -0.04577474296092987, -0.10584477335214615, 0.12764766812324524, 0.10308482497930527, 0.2914050221443176, 0.017539246007800102, -0.01614494062960148, 0.024886179715394974, 0.16187067329883575, 0.11293522268533707, 0.07440941035747528, 0.02382654882967472, 0.016518447548151016, -0.007866724394261837, -0.07610166817903519, 0.10655437409877777, 0.10842039436101913, 0.12170971930027008, -0.13859626650810242, 0.0671781674027443, -0.23253211379051208, 0.03189892694354057, 0.09038456529378891, 0.043346766382455826, -0.033401332795619965, 
0.037072211503982544, 0.07795464992523193, -0.05803500488400459, -0.09059169143438339, 0.047771379351615906, 0.1686781942844391, -0.10052448511123657, 0.0873318612575531, 0.0011902895057573915, 0.050875432789325714, -0.11023169010877609, -0.07532956451177597, -0.11332958191633224, 0.010960653424263, -0.01857323758304119, 0.10203471779823303, -0.06725078821182251, 0.22024138271808624, 0.012883077375590801, -0.02900417149066925, -0.1398615539073944, 0.04669894650578499, 0.025225184857845306, 0.16271787881851196, 0.15779690444469452, 0.0665983110666275, -0.1274942010641098, -0.057711612433195114, -0.0415828637778759, 0.005230071488767862, 0.1703876256942749, -0.20688332617282867, -0.004476698115468025, 0.07149489969015121, -0.025152741000056267, -0.03904296085238457, -0.011081848293542862, -0.07354561239480972, -0.17404156923294067, 0.08375463634729385, 0.09985645860433578, -0.017474930733442307, -0.03790875896811485, -0.06430035084486008, -0.04918397217988968, 0.08223138004541397, -0.2025139033794403, -0.04761037975549698, -0.20313557982444763, -0.03402082249522209, 0.11291033774614334, -0.05141989141702652, 0.08245693147182465, 0.018464358523488045, 0.037082087248563766, -0.06529822945594788, -0.13876163959503174, 0.049704551696777344, -0.06864768266677856, -0.08564122021198273, -0.08400768041610718, 0.1769363433122635, 0.1053185909986496, -0.010810564272105694, 0.0011822556843981147, 0.14727188646793365, -0.012994480319321156, -0.09806592017412186, 0.031247830018401146, 0.015538608655333519, 0.01577812433242798, 0.14303059875965118, -0.026537079364061356, -0.06654826551675797, 0.11629939824342728, -0.11740739643573761, 0.06176786124706268, 0.0529484897851944, -0.09807154536247253, 0.09974684566259384, 0.10580237954854965, -0.067611925303936, -0.26003074645996094, -0.03602974861860275, 0.002529629971832037, 0.0484856478869915, 0.09634797275066376, -0.1675940454006195, 0.10068399459123611, 0.07011399418115616, -0.05656212195754051, 0.05175292491912842, 
-0.17903228104114532, -0.04830625280737877, 0.14476938545703888, 0.040742386132478714, 0.1436554342508316, -0.05650424212217331, -0.09791405498981476, -0.0434773713350296, 0.09509875625371933, -0.0043478114530444145, 0.021273953840136528, 0.12493855506181717, 0.005796571262180805, -0.04220808669924736, 0.013893571682274342, -0.014428804628551006, 0.15736570954322815, 0.22117681801319122, 0.04466624557971954, -0.08732996881008148, -0.10204377770423889, 0.2424120008945465, -0.002277453662827611, 0.02178533561527729, 0.03760511428117752, 0.058962199836969376, -0.1785753220319748, -0.05041489750146866, -0.03775233402848244, 0.1287846565246582, -0.024227112531661987, -0.10546324402093887, -0.10611055046319962, 0.10395294427871704, 0.005122795235365629, -0.045142896473407745, 0.20167526602745056, 0.009044144302606583, -0.07938938587903976, 0.15766862034797668, 0.04773838818073273, 0.07535132020711899, -0.08805037289857864, -0.052405957132577896, 0.0004334145924076438, 0.046579401940107346, -0.07969287037849426, 0.027814386412501335, 0.086197130382061, 0.06661787629127502, -0.05357304960489273, 0.05021898075938225, -0.07478237897157669, -0.0027569623198360205, 0.10050702840089798, -0.06918337196111679, -0.2809094488620758, -0.09018313139677048, -0.017973070964217186, 0.02001652680337429, 0.2556643486022949, 0.11367097496986389, 0.011849517934024334, 0.02531435154378414, -0.0347176268696785, -0.03082350082695484, 0.009441650472581387, 0.12065354734659195, 0.07079967111349106, 0.020391765981912613, -0.11542663723230362, 0.1136234924197197, 0.15120336413383484, 0.0603133849799633, -0.0718759223818779, 0.09925373643636703, -0.10376567393541336, -0.027058323845267296, 0.023986773565411568, -0.03287067636847496, -0.03259751573204994, -0.059775568544864655, -0.04264790564775467, -0.09323729574680328, 0.028062667697668076, 0.15531861782073975, 0.08200680464506149, 0.05623123049736023, -0.1052946001291275, -0.08032739162445068, 0.04420412704348564, 0.0005475064390338957, 
-0.025151656940579414, 0.08354611694812775, -0.031351473182439804, -0.019014935940504074, -0.03395136445760727, 0.09325020015239716, -0.08488910645246506, -0.08435610681772232, -0.10597331076860428, 0.0005597412819042802, -0.037814464420080185, -0.07517710328102112, -0.02011052705347538, 0.0006943356711417437, -0.08046946674585342, -0.08728919178247452, 0.022317035123705864, 0.053777627646923065, -0.04688867926597595, 0.054777540266513824, -0.09710872918367386, 0.08175564557313919, -0.034898530691862106, -0.0005166771588847041, -0.01500303577631712, -0.06688053905963898, 0.18747006356716156, -0.039578743278980255, 0.004311561584472656, -0.022940194234251976, -0.09313338994979858, -0.048603933304548264, -0.006268111988902092, 0.03924638032913208, 0.058540452271699905, -0.01975180394947529, 0.04018485173583031, 0.022395849227905273, 0.05003852769732475, 0.06052621826529503, 0.1403336077928543, -0.11353220045566559, 0.06015260890126228, -0.10907522588968277, -0.1259496659040451, -0.043338168412446976, 0.013649746775627136, 0.051457833498716354, -0.010355747304856777, 0.03913358598947525, 0.025236913934350014, 0.0316479429602623, -0.06614527851343155, -0.04807150736451149, 0.045020222663879395, -0.12395291030406952, -0.05676815286278725, -0.06007043272256851, 0.03920372202992439, 0.02124772220849991, 0.23520353436470032, 0.06257544457912445, -0.055247947573661804, 0.017006488516926765, 0.25426140427589417, 0.08499357849359512, -0.03125316649675369, 0.13104449212551117, 0.07173479348421097, -0.02630835585296154, -0.00570316705852747, -0.025243761017918587, -0.04283306002616882, 0.055026181042194366, 0.12312404066324234, 0.02797810174524784, -0.03450196236371994, 0.05559023097157478, 0.10466868430376053, -0.13212670385837555, -0.15884053707122803, -0.060027025640010834, 0.03951407968997955, 0.04061109945178032, -0.09797095507383347, -0.08157696574926376, 0.06067521125078201, -0.1451358050107956, 0.11008062958717346, 0.10731373727321625, -0.029444685205817223, 
-0.06783223152160645, -0.2865375280380249, -0.05024456977844238, -0.06010640412569046, 0.03545665740966797, -0.014699119143188, 0.04011591151356697, 0.19663339853286743, 0.018655572086572647, -0.02835601381957531, 0.07822146266698837, -0.03674379363656044, -0.08412135392427444, 0.060838717967271805, -0.014065561816096306, 0.12357030063867569, 0.012205610051751137, -0.03300187736749649, -0.07497507333755493, -0.009023169986903667, -0.05105491355061531, 0.05339104309678078, 0.06887275725603104, -0.01943986490368843, -0.04615575075149536, -0.014323453418910503, -0.046556174755096436, -0.04511065408587456, -0.013886362314224243, 0.12942154705524445, 0.06768598407506943, -0.005554344039410353, 0.03963648900389671, 0.05637985095381737, -0.056181471794843674, 0.01364201307296753, 0.03830915689468384, 0.01618233695626259, 0.008474758826196194, 0.0775575116276741, 0.09768591076135635, -0.11515744775533676, -0.0004499794158618897, 0.04895465821027756, 0.11682039499282837, 0.046910714358091354, -0.005403571762144566, 0.003593378933146596, 0.00514413695782423, 0.08751805126667023, 0.14316575229167938, -0.06188647449016571, 0.3968944549560547, -0.008131167851388454, -0.05984189361333847, 0.022658729925751686, -0.028774864971637726, -0.05317520350217819, 0.23207832872867584, -0.031156474724411964, -0.1024545282125473, -0.044007740914821625, 0.1315716952085495, 0.005723238922655582, -0.026887178421020508, 0.12027740478515625, -0.06387337297201157, -0.10108117014169693, 0.0028036385774612427, 0.074262835085392, -0.043329086154699326, -0.006363759748637676, -0.04886264353990555, 0.052212268114089966, 0.10946281999349594, 0.042618755251169205, -0.17432162165641785, -0.06771199405193329, 0.12017690390348434, -0.10969589650630951, 0.061492759734392166, -0.040681757032871246, 0.11007377505302429, 0.02093149907886982, 0.012526209466159344, -0.07006992399692535, 0.09941472113132477, 0.029278598725795746, -0.06312315165996552, 0.024416590109467506, 0.037575822323560715, 
-0.05970175936818123, 0.06505043804645538, 0.021180808544158936, 0.08884435892105103, 0.09559617936611176, 0.217209130525589, 0.0737941712141037, -0.09894514083862305, 0.052202414721250534, -0.1294279396533966, 0.04206327721476555, -0.0016801345627754927, -0.005318503361195326, -0.017284980043768883, -0.05622515454888344, 0.03290187567472458, 0.035315390676259995, 0.038266416639089584, 0.03476922586560249, -0.08036071062088013, -0.06352592259645462, -0.03405158966779709, 0.0022459630854427814, -0.017932480201125145, -0.07182326912879944, -0.04989747703075409, 0.011900518089532852, -0.04799797758460045, 0.033848412334918976, 0.0794178918004036, -0.07091443985700607, -0.04399725794792175, -0.18542705476284027, 0.011779647320508957, -0.05608700215816498, -0.11993113160133362, -0.0018031176878139377 ]
58c56cd228431b9dc74be6ed64b563f8959de702
# Dataset Card for Evaluation run of TeeZee/Xwin-LM-70B-V0.1_Limarpv3 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [TeeZee/Xwin-LM-70B-V0.1_Limarpv3](https://huggingface.co/TeeZee/Xwin-LM-70B-V0.1_Limarpv3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TeeZee__Xwin-LM-70B-V0.1_Limarpv3", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T10:59:31.899107](https://huggingface.co/datasets/open-llm-leaderboard/details_TeeZee__Xwin-LM-70B-V0.1_Limarpv3/blob/main/results_2024-01-25T10-59-31.899107.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6907434014004448, "acc_stderr": 0.030406546643218627, "acc_norm": 0.6960374849812471, "acc_norm_stderr": 0.03098820704077189, "mc1": 0.3818849449204406, "mc1_stderr": 0.017008101939163495, "mc2": 0.5715047295306395, "mc2_stderr": 0.015147942199667246 }, "harness|arc:challenge|25": { "acc": 0.6569965870307167, "acc_stderr": 0.013872423223718164, "acc_norm": 0.7081911262798635, "acc_norm_stderr": 0.013284525292403511 }, "harness|hellaswag|10": { "acc": 0.6826329416450906, "acc_stderr": 0.004645003662067883, "acc_norm": 0.8697470623381797, "acc_norm_stderr": 0.0033589362798672655 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6444444444444445, "acc_stderr": 0.04135176749720385, "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.04135176749720385 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8289473684210527, "acc_stderr": 0.03064360707167709, "acc_norm": 0.8289473684210527, "acc_norm_stderr": 0.03064360707167709 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.74, "acc_stderr": 0.04408440022768081, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768081 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7132075471698113, "acc_stderr": 0.02783491252754407, "acc_norm": 0.7132075471698113, "acc_norm_stderr": 0.02783491252754407 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8055555555555556, "acc_stderr": 0.03309615177059006, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.03309615177059006 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 
0.04923659639173309 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6416184971098265, "acc_stderr": 0.03656343653353159, "acc_norm": 0.6416184971098265, "acc_norm_stderr": 0.03656343653353159 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.35294117647058826, "acc_stderr": 0.04755129616062946, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.04755129616062946 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.79, "acc_stderr": 0.04093601807403326, "acc_norm": 0.79, "acc_norm_stderr": 0.04093601807403326 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6893617021276596, "acc_stderr": 0.03025123757921317, "acc_norm": 0.6893617021276596, "acc_norm_stderr": 0.03025123757921317 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4473684210526316, "acc_stderr": 0.04677473004491199, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.04677473004491199 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6206896551724138, "acc_stderr": 0.04043461861916747, "acc_norm": 0.6206896551724138, "acc_norm_stderr": 0.04043461861916747 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4444444444444444, "acc_stderr": 0.02559185776138218, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.02559185776138218 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8129032258064516, "acc_stderr": 0.022185710092252252, "acc_norm": 0.8129032258064516, "acc_norm_stderr": 0.022185710092252252 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5467980295566502, "acc_stderr": 0.035025446508458714, "acc_norm": 0.5467980295566502, "acc_norm_stderr": 0.035025446508458714 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8181818181818182, "acc_stderr": 0.0301176889295036, "acc_norm": 0.8181818181818182, "acc_norm_stderr": 0.0301176889295036 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.898989898989899, "acc_stderr": 0.021469735576055346, "acc_norm": 0.898989898989899, "acc_norm_stderr": 0.021469735576055346 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9430051813471503, "acc_stderr": 0.01673108529360755, "acc_norm": 0.9430051813471503, "acc_norm_stderr": 0.01673108529360755 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7, "acc_stderr": 0.02323458108842849, "acc_norm": 0.7, "acc_norm_stderr": 0.02323458108842849 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.028742040903948485, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.028742040903948485 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.726890756302521, "acc_stderr": 0.028942004040998167, "acc_norm": 0.726890756302521, "acc_norm_stderr": 0.028942004040998167 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.45695364238410596, "acc_stderr": 0.04067325174247443, "acc_norm": 0.45695364238410596, "acc_norm_stderr": 0.04067325174247443 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8825688073394495, "acc_stderr": 0.01380278022737734, "acc_norm": 0.8825688073394495, "acc_norm_stderr": 0.01380278022737734 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5648148148148148, "acc_stderr": 0.033812000056435254, "acc_norm": 
0.5648148148148148, "acc_norm_stderr": 0.033812000056435254 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9166666666666666, "acc_stderr": 0.019398452135813902, "acc_norm": 0.9166666666666666, "acc_norm_stderr": 0.019398452135813902 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8987341772151899, "acc_stderr": 0.019637720526065505, "acc_norm": 0.8987341772151899, "acc_norm_stderr": 0.019637720526065505 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7847533632286996, "acc_stderr": 0.027584066602208274, "acc_norm": 0.7847533632286996, "acc_norm_stderr": 0.027584066602208274 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8320610687022901, "acc_stderr": 0.032785485373431386, "acc_norm": 0.8320610687022901, "acc_norm_stderr": 0.032785485373431386 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8760330578512396, "acc_stderr": 0.03008309871603521, "acc_norm": 0.8760330578512396, "acc_norm_stderr": 0.03008309871603521 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8333333333333334, "acc_stderr": 0.03602814176392645, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.03602814176392645 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8159509202453987, "acc_stderr": 0.030446777687971726, "acc_norm": 0.8159509202453987, "acc_norm_stderr": 0.030446777687971726 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.48214285714285715, "acc_stderr": 0.047427623612430116, "acc_norm": 0.48214285714285715, "acc_norm_stderr": 0.047427623612430116 }, "harness|hendrycksTest-management|5": { "acc": 0.8252427184466019, "acc_stderr": 0.03760178006026621, "acc_norm": 0.8252427184466019, "acc_norm_stderr": 0.03760178006026621 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8888888888888888, "acc_stderr": 0.020588491316092365, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.020588491316092365 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, 
"acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8659003831417624, "acc_stderr": 0.012185528166499978, "acc_norm": 0.8659003831417624, "acc_norm_stderr": 0.012185528166499978 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7803468208092486, "acc_stderr": 0.022289638852617893, "acc_norm": 0.7803468208092486, "acc_norm_stderr": 0.022289638852617893 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.5463687150837989, "acc_stderr": 0.016650437588269076, "acc_norm": 0.5463687150837989, "acc_norm_stderr": 0.016650437588269076 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7418300653594772, "acc_stderr": 0.025058503316958157, "acc_norm": 0.7418300653594772, "acc_norm_stderr": 0.025058503316958157 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7717041800643086, "acc_stderr": 0.02383930331139821, "acc_norm": 0.7717041800643086, "acc_norm_stderr": 0.02383930331139821 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.845679012345679, "acc_stderr": 0.020100830999850987, "acc_norm": 0.845679012345679, "acc_norm_stderr": 0.020100830999850987 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5319148936170213, "acc_stderr": 0.02976667507587387, "acc_norm": 0.5319148936170213, "acc_norm_stderr": 0.02976667507587387 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5404172099087353, "acc_stderr": 0.012728446067669943, "acc_norm": 0.5404172099087353, "acc_norm_stderr": 0.012728446067669943 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7205882352941176, "acc_stderr": 0.02725720260611495, "acc_norm": 0.7205882352941176, "acc_norm_stderr": 0.02725720260611495 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.761437908496732, "acc_stderr": 0.017242385828779613, "acc_norm": 0.761437908496732, "acc_norm_stderr": 0.017242385828779613 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, 
"acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8, "acc_stderr": 0.02560737598657916, "acc_norm": 0.8, "acc_norm_stderr": 0.02560737598657916 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8606965174129353, "acc_stderr": 0.024484487162913973, "acc_norm": 0.8606965174129353, "acc_norm_stderr": 0.024484487162913973 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.92, "acc_stderr": 0.0272659924344291, "acc_norm": 0.92, "acc_norm_stderr": 0.0272659924344291 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.03882310850890594, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.03882310850890594 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8771929824561403, "acc_stderr": 0.025172984350155754, "acc_norm": 0.8771929824561403, "acc_norm_stderr": 0.025172984350155754 }, "harness|truthfulqa:mc|0": { "mc1": 0.3818849449204406, "mc1_stderr": 0.017008101939163495, "mc2": 0.5715047295306395, "mc2_stderr": 0.015147942199667246 }, "harness|winogrande|5": { "acc": 0.8176795580110497, "acc_stderr": 0.010851565594267204 }, "harness|gsm8k|5": { "acc": 0.489764973464746, "acc_stderr": 0.013769598923012404 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_TeeZee__Xwin-LM-70B-V0.1_Limarpv3
[ "region:us" ]
2024-01-25T11:02:02+00:00
{"pretty_name": "Evaluation run of TeeZee/Xwin-LM-70B-V0.1_Limarpv3", "dataset_summary": "Dataset automatically created during the evaluation run of model [TeeZee/Xwin-LM-70B-V0.1_Limarpv3](https://huggingface.co/TeeZee/Xwin-LM-70B-V0.1_Limarpv3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TeeZee__Xwin-LM-70B-V0.1_Limarpv3\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T10:59:31.899107](https://huggingface.co/datasets/open-llm-leaderboard/details_TeeZee__Xwin-LM-70B-V0.1_Limarpv3/blob/main/results_2024-01-25T10-59-31.899107.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6907434014004448,\n \"acc_stderr\": 0.030406546643218627,\n \"acc_norm\": 0.6960374849812471,\n \"acc_norm_stderr\": 0.03098820704077189,\n \"mc1\": 0.3818849449204406,\n \"mc1_stderr\": 0.017008101939163495,\n \"mc2\": 0.5715047295306395,\n \"mc2_stderr\": 0.015147942199667246\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6569965870307167,\n \"acc_stderr\": 0.013872423223718164,\n \"acc_norm\": 0.7081911262798635,\n \"acc_norm_stderr\": 0.013284525292403511\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6826329416450906,\n \"acc_stderr\": 0.004645003662067883,\n \"acc_norm\": 0.8697470623381797,\n \"acc_norm_stderr\": 0.0033589362798672655\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6444444444444445,\n \"acc_stderr\": 0.04135176749720385,\n \"acc_norm\": 0.6444444444444445,\n \"acc_norm_stderr\": 0.04135176749720385\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.8289473684210527,\n \"acc_stderr\": 0.03064360707167709,\n \"acc_norm\": 0.8289473684210527,\n \"acc_norm_stderr\": 0.03064360707167709\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768081,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768081\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7132075471698113,\n \"acc_stderr\": 0.02783491252754407,\n \"acc_norm\": 0.7132075471698113,\n \"acc_norm_stderr\": 0.02783491252754407\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.03309615177059006,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.03309615177059006\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n 
\"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6416184971098265,\n \"acc_stderr\": 0.03656343653353159,\n \"acc_norm\": 0.6416184971098265,\n \"acc_norm_stderr\": 0.03656343653353159\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.35294117647058826,\n \"acc_stderr\": 0.04755129616062946,\n \"acc_norm\": 0.35294117647058826,\n \"acc_norm_stderr\": 0.04755129616062946\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.04093601807403326,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.04093601807403326\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6893617021276596,\n \"acc_stderr\": 0.03025123757921317,\n \"acc_norm\": 0.6893617021276596,\n \"acc_norm_stderr\": 0.03025123757921317\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4473684210526316,\n \"acc_stderr\": 0.04677473004491199,\n \"acc_norm\": 0.4473684210526316,\n \"acc_norm_stderr\": 0.04677473004491199\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6206896551724138,\n \"acc_stderr\": 0.04043461861916747,\n \"acc_norm\": 0.6206896551724138,\n \"acc_norm_stderr\": 0.04043461861916747\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.02559185776138218,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.02559185776138218\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n 
\"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8129032258064516,\n \"acc_stderr\": 0.022185710092252252,\n \"acc_norm\": 0.8129032258064516,\n \"acc_norm_stderr\": 0.022185710092252252\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5467980295566502,\n \"acc_stderr\": 0.035025446508458714,\n \"acc_norm\": 0.5467980295566502,\n \"acc_norm_stderr\": 0.035025446508458714\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8181818181818182,\n \"acc_stderr\": 0.0301176889295036,\n \"acc_norm\": 0.8181818181818182,\n \"acc_norm_stderr\": 0.0301176889295036\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.898989898989899,\n \"acc_stderr\": 0.021469735576055346,\n \"acc_norm\": 0.898989898989899,\n \"acc_norm_stderr\": 0.021469735576055346\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9430051813471503,\n \"acc_stderr\": 0.01673108529360755,\n \"acc_norm\": 0.9430051813471503,\n \"acc_norm_stderr\": 0.01673108529360755\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.02323458108842849,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.02323458108842849\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.028742040903948485,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.028742040903948485\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.726890756302521,\n 
\"acc_stderr\": 0.028942004040998167,\n \"acc_norm\": 0.726890756302521,\n \"acc_norm_stderr\": 0.028942004040998167\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.45695364238410596,\n \"acc_stderr\": 0.04067325174247443,\n \"acc_norm\": 0.45695364238410596,\n \"acc_norm_stderr\": 0.04067325174247443\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8825688073394495,\n \"acc_stderr\": 0.01380278022737734,\n \"acc_norm\": 0.8825688073394495,\n \"acc_norm_stderr\": 0.01380278022737734\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5648148148148148,\n \"acc_stderr\": 0.033812000056435254,\n \"acc_norm\": 0.5648148148148148,\n \"acc_norm_stderr\": 0.033812000056435254\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9166666666666666,\n \"acc_stderr\": 0.019398452135813902,\n \"acc_norm\": 0.9166666666666666,\n \"acc_norm_stderr\": 0.019398452135813902\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8987341772151899,\n \"acc_stderr\": 0.019637720526065505,\n \"acc_norm\": 0.8987341772151899,\n \"acc_norm_stderr\": 0.019637720526065505\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7847533632286996,\n \"acc_stderr\": 0.027584066602208274,\n \"acc_norm\": 0.7847533632286996,\n \"acc_norm_stderr\": 0.027584066602208274\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8320610687022901,\n \"acc_stderr\": 0.032785485373431386,\n \"acc_norm\": 0.8320610687022901,\n \"acc_norm_stderr\": 0.032785485373431386\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8760330578512396,\n \"acc_stderr\": 0.03008309871603521,\n \"acc_norm\": 0.8760330578512396,\n \"acc_norm_stderr\": 0.03008309871603521\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.03602814176392645,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.03602814176392645\n },\n 
\"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8159509202453987,\n \"acc_stderr\": 0.030446777687971726,\n \"acc_norm\": 0.8159509202453987,\n \"acc_norm_stderr\": 0.030446777687971726\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.48214285714285715,\n \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.48214285714285715,\n \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8252427184466019,\n \"acc_stderr\": 0.03760178006026621,\n \"acc_norm\": 0.8252427184466019,\n \"acc_norm_stderr\": 0.03760178006026621\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.020588491316092365,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.020588491316092365\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8659003831417624,\n \"acc_stderr\": 0.012185528166499978,\n \"acc_norm\": 0.8659003831417624,\n \"acc_norm_stderr\": 0.012185528166499978\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7803468208092486,\n \"acc_stderr\": 0.022289638852617893,\n \"acc_norm\": 0.7803468208092486,\n \"acc_norm_stderr\": 0.022289638852617893\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.5463687150837989,\n \"acc_stderr\": 0.016650437588269076,\n \"acc_norm\": 0.5463687150837989,\n \"acc_norm_stderr\": 0.016650437588269076\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7418300653594772,\n \"acc_stderr\": 0.025058503316958157,\n \"acc_norm\": 0.7418300653594772,\n \"acc_norm_stderr\": 0.025058503316958157\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7717041800643086,\n \"acc_stderr\": 0.02383930331139821,\n \"acc_norm\": 0.7717041800643086,\n \"acc_norm_stderr\": 0.02383930331139821\n },\n 
\"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.845679012345679,\n \"acc_stderr\": 0.020100830999850987,\n \"acc_norm\": 0.845679012345679,\n \"acc_norm_stderr\": 0.020100830999850987\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5319148936170213,\n \"acc_stderr\": 0.02976667507587387,\n \"acc_norm\": 0.5319148936170213,\n \"acc_norm_stderr\": 0.02976667507587387\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5404172099087353,\n \"acc_stderr\": 0.012728446067669943,\n \"acc_norm\": 0.5404172099087353,\n \"acc_norm_stderr\": 0.012728446067669943\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7205882352941176,\n \"acc_stderr\": 0.02725720260611495,\n \"acc_norm\": 0.7205882352941176,\n \"acc_norm_stderr\": 0.02725720260611495\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.761437908496732,\n \"acc_stderr\": 0.017242385828779613,\n \"acc_norm\": 0.761437908496732,\n \"acc_norm_stderr\": 0.017242385828779613\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.02560737598657916,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.02560737598657916\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8606965174129353,\n \"acc_stderr\": 0.024484487162913973,\n \"acc_norm\": 0.8606965174129353,\n \"acc_norm_stderr\": 0.024484487162913973\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.92,\n \"acc_stderr\": 0.0272659924344291,\n \"acc_norm\": 0.92,\n \"acc_norm_stderr\": 0.0272659924344291\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.03882310850890594,\n \"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 0.03882310850890594\n },\n 
\"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8771929824561403,\n \"acc_stderr\": 0.025172984350155754,\n \"acc_norm\": 0.8771929824561403,\n \"acc_norm_stderr\": 0.025172984350155754\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3818849449204406,\n \"mc1_stderr\": 0.017008101939163495,\n \"mc2\": 0.5715047295306395,\n \"mc2_stderr\": 0.015147942199667246\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8176795580110497,\n \"acc_stderr\": 0.010851565594267204\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.489764973464746,\n \"acc_stderr\": 0.013769598923012404\n }\n}\n```", "repo_url": "https://huggingface.co/TeeZee/Xwin-LM-70B-V0.1_Limarpv3", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|arc:challenge|25_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|gsm8k|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hellaswag|10_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T10-59-31.899107.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T10-59-31.899107.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T10-59-31.899107.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T10-59-31.899107.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T10-59-31.899107.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T10-59-31.899107.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T10-59-31.899107.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T10-59-31.899107.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["**/details_harness|winogrande|5_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T10-59-31.899107.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T10_59_31.899107", "path": ["results_2024-01-25T10-59-31.899107.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T10-59-31.899107.parquet"]}]}]}
2024-01-25T11:02:29+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of TeeZee/Xwin-LM-70B-V0.1_Limarpv3 Dataset automatically created during the evaluation run of model TeeZee/Xwin-LM-70B-V0.1_Limarpv3 on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T10:59:31.899107(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of TeeZee/Xwin-LM-70B-V0.1_Limarpv3\n\n\n\nDataset automatically created during the evaluation run of model TeeZee/Xwin-LM-70B-V0.1_Limarpv3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T10:59:31.899107(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of TeeZee/Xwin-LM-70B-V0.1_Limarpv3\n\n\n\nDataset automatically created during the evaluation run of model TeeZee/Xwin-LM-70B-V0.1_Limarpv3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T10:59:31.899107(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 197, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of TeeZee/Xwin-LM-70B-V0.1_Limarpv3\n\n\n\nDataset automatically created during the evaluation run of model TeeZee/Xwin-LM-70B-V0.1_Limarpv3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T10:59:31.899107(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
[ -0.05938422307372093, 0.20205867290496826, -0.0057266997173428535, 0.03730776533484459, 0.08641650527715683, -0.010042536072432995, 0.012591607868671417, 0.11272609233856201, -0.009849273599684238, 0.16069082915782928, -0.007293338421732187, 0.1166624203324318, 0.08323366940021515, 0.17367318272590637, 0.013927942141890526, -0.13118529319763184, 0.013966665603220463, -0.07426589727401733, 0.0759694054722786, 0.08700605481863022, 0.09714361280202866, -0.09075170010328293, 0.06560421735048294, -0.038822274655103683, -0.01924939453601837, 0.011823390610516071, -0.08369570225477219, -0.04180004447698593, 0.09539101272821426, 0.08796730637550354, 0.02312488481402397, -0.028360886499285698, 0.02011013776063919, -0.2513141334056854, 0.015607906505465508, 0.08338527381420135, 0.007349092047661543, 0.05255012959241867, 0.1267850399017334, -0.05750530585646629, 0.06378999352455139, -0.09142553061246872, 0.05982024222612381, 0.04021349549293518, -0.11309318989515305, -0.12609340250492096, -0.1406976580619812, 0.01324634812772274, 0.07625477015972137, 0.05318642407655716, -0.027859235182404518, 0.15116427838802338, -0.02092765085399151, 0.04463204741477966, 0.11731061339378357, -0.1190880611538887, -0.030036618933081627, 0.015947584062814713, 0.027491386979818344, 0.053847189992666245, -0.09768246114253998, -0.016294917091727257, 0.03191721811890602, 0.05232251062989235, 0.033276528120040894, 0.0007168189622461796, -0.07488588988780975, 0.016791051253676414, -0.11595217138528824, -0.09940928220748901, 0.20970825850963593, 0.008368337526917458, -0.035788193345069885, -0.14515772461891174, -0.03288319706916809, 0.005388205870985985, 0.0012794329086318612, -0.03515708073973656, 0.01106976717710495, -0.023616347461938858, 0.06521335989236832, -0.021312184631824493, -0.10169979929924011, -0.00512116402387619, -0.016534654423594475, 0.03993069753050804, 0.012448725290596485, -0.01335224974900484, 0.004688703920692205, 0.11254540830850601, -0.026052871719002724, 
-0.09521550685167313, -0.08252882212400436, -0.04451075196266174, -0.11595042794942856, -0.05549490824341774, 0.023217564448714256, -0.07379449158906937, 0.040739014744758606, 0.22217604517936707, -0.04092452675104141, 0.0349755585193634, -0.09809306263923645, -0.007232865318655968, 0.13402117788791656, 0.07223617285490036, -0.0642591193318367, -0.08274901658296585, -0.005613705143332481, 0.013587447814643383, 0.02744263783097267, -0.019418837502598763, 0.017260907217860222, 0.06448309123516083, 0.06066035106778145, 0.11635486781597137, 0.11472266167402267, 0.022580811753869057, -0.06817050278186798, -0.02680845558643341, 0.19305871427059174, -0.16768766939640045, 0.012124632485210896, 0.011208772659301758, -0.04113833233714104, -0.10176937282085419, 0.05173953250050545, -0.014269843697547913, -0.07507377862930298, 0.09079617261886597, -0.0570199154317379, -0.06955030560493469, -0.08376935124397278, -0.056408219039440155, 0.05675051733851433, 0.0002151675580535084, -0.026248374953866005, -0.06792376935482025, -0.11700527369976044, -0.08265738934278488, 0.02902994491159916, -0.07588531076908112, -0.03039020672440529, 0.02211356908082962, -0.020521916449069977, -0.016169633716344833, -0.018188446760177612, 0.12499741464853287, -0.05926923453807831, 0.028055664151906967, 0.007702128030359745, 0.004376063123345375, 0.07981787621974945, 0.05189704895019531, -0.11931370943784714, 0.07437892258167267, -0.08054907619953156, 0.1106615960597992, -0.09149537980556488, -0.005950766149908304, -0.13551588356494904, 0.008105246350169182, -0.024455375969409943, 0.01274664606899023, 0.014888363890349865, 0.08700869977474213, -0.25653746724128723, 0.023053672164678574, 0.10712435096502304, -0.09185072779655457, -0.10074616968631744, 0.06688215583562851, -0.041913341730833054, 0.07173704355955124, 0.05630189925432205, 0.08257691562175751, 0.09760258346796036, -0.07297805696725845, -0.12128003686666489, -0.07417374849319458, -0.028761176392436028, 0.12289640307426453, 
0.053669560700654984, -0.054383814334869385, 0.12552030384540558, 0.039611876010894775, -0.014308038167655468, -0.0865463986992836, -0.014331282116472721, -0.06486629694700241, -0.023403136059641838, -0.03184208646416664, -0.08597324043512344, -0.0007568493601866066, -0.08766457438468933, -0.03489113599061966, -0.09539639204740524, 0.024206561967730522, 0.0851488709449768, -0.007887336425483227, 0.013035215437412262, -0.07055418193340302, 0.04290799796581268, 0.006019292399287224, 0.02606426738202572, -0.21336543560028076, -0.09129462391138077, 0.035094305872917175, -0.10231035947799683, 0.057772859930992126, 0.008558866567909718, 0.007268737070262432, 0.030060134828090668, 0.0007651358610019088, 0.01411796547472477, 0.0012229898711666465, -0.003824157640337944, -0.009061495773494244, -0.14184948801994324, -0.054412636905908585, -0.07992131263017654, 0.04903927817940712, -0.13354985415935516, -0.020201444625854492, 0.07520827651023865, 0.1511658877134323, 0.00529162771999836, -0.07531847059726715, 0.06814171373844147, -0.0006382205174304545, -0.04885724559426308, -0.07053014636039734, -0.004657685291022062, -0.01711122877895832, 0.0618661604821682, 0.020684970542788506, -0.19670885801315308, -0.138970747590065, 0.07700955122709274, 0.10703123360872269, -0.07107847929000854, -0.043352674692869186, -0.06161728501319885, -0.055232055485248566, -0.10036829859018326, -0.07006321847438812, 0.0767962858080864, 0.08359117805957794, 0.03098743036389351, -0.06848045438528061, -0.07457420974969864, -0.0014553297078236938, 0.037078410387039185, -0.07154569029808044, 0.09023484587669373, 0.07481078803539276, -0.10223111510276794, 0.10296396166086197, 0.02145502343773842, 0.12156138569116592, 0.07188592106103897, 0.016182899475097656, -0.10060625523328781, -0.016520416364073753, 0.04675768315792084, 0.0238186065107584, 0.08620669692754745, -0.0412449948489666, 0.033469878137111664, 0.0789870023727417, -0.01241303700953722, 0.04856763407588005, -0.06230325624346733, 
0.04088450223207474, 0.037064023315906525, 0.0010236792732030153, 0.026320384815335274, 0.004229913465678692, 0.01771513558924198, 0.06267916411161423, 0.027427464723587036, 0.1042865440249443, -0.013996494933962822, -0.03925923630595207, -0.08957847207784653, 0.14499115943908691, -0.10602808743715286, -0.27943721413612366, -0.1450553834438324, -0.04370489716529846, -0.028602175414562225, -0.016262583434581757, 0.0541936494410038, -0.012431712821125984, -0.1120833232998848, -0.0970037430524826, 0.050704095512628555, 0.01956971548497677, -0.12534524500370026, -0.035953156650066376, 0.049248624593019485, 0.009543988853693008, -0.16761744022369385, 0.03565668314695358, 0.04274518042802811, -0.03859005123376846, 0.010580739006400108, 0.0989498421549797, 0.12043027579784393, 0.07477382570505142, 0.031720928847789764, -0.017540227621793747, 0.0023337150923907757, 0.1978069394826889, -0.10038451850414276, 0.04996776580810547, 0.11165043711662292, -0.03869457170367241, 0.06796617805957794, 0.17570534348487854, 0.005718333646655083, -0.10121901333332062, 0.03870798274874687, 0.08556440472602844, -0.05927012115716934, -0.272152841091156, -0.08320349454879761, -0.03646077960729599, -0.01632913574576378, 0.09587077796459198, 0.0651186928153038, 0.0009712661849334836, 0.02076113037765026, -0.10517352819442749, -0.019218016415834427, -0.05709722265601158, 0.0636894628405571, 0.08663952350616455, 0.005141750443726778, 0.04320582002401352, -0.03665497526526451, 0.044195204973220825, 0.11324681341648102, 0.05203312635421753, 0.16232572495937347, -0.03483671322464943, 0.15499509871006012, 0.08780105412006378, 0.09293416887521744, -0.053411681205034256, 0.05765242874622345, -0.0001389606622979045, 0.07043232768774033, -0.005500663071870804, -0.10081630200147629, -0.05534441024065018, 0.08611050993204117, 0.004470564890652895, -0.04569447785615921, 0.04933460056781769, -0.02649858593940735, 0.04993126168847084, 0.1579188108444214, -0.00004894534868071787, -0.15088601410388947, 
-0.05694147199392319, 0.05718875303864479, -0.026762325316667557, -0.11436488479375839, -0.027582773938775063, 0.06881105899810791, -0.13498322665691376, 0.01955418288707733, -0.024390997365117073, 0.09171540290117264, -0.11655014008283615, -0.022581228986382484, -0.014885332435369492, 0.10413125902414322, -0.00901393499225378, 0.11612570285797119, -0.14129941165447235, 0.11104527860879898, 0.004034305457025766, 0.052218858152627945, -0.0880180224776268, 0.06891519576311111, -0.02967204339802265, -0.046988971531391144, 0.1346059888601303, -0.005852940026670694, -0.11579970270395279, -0.036321986466646194, -0.12220924347639084, 0.009363695047795773, 0.04825668781995773, -0.09019070863723755, 0.1017848551273346, 0.02135075256228447, -0.012030120939016342, -0.022060208022594452, -0.006638775113970041, -0.13837715983390808, -0.21934588253498077, 0.10058296471834183, -0.10066855698823929, 0.06143900752067566, -0.038648780435323715, -0.0412428081035614, -0.03578271344304085, 0.19052550196647644, -0.06885606795549393, -0.06928552687168121, -0.12368356436491013, 0.04113391041755676, 0.1808796375989914, -0.05731529742479324, 0.04461400583386421, -0.04380417987704277, 0.1803773194551468, 0.00009575264266459271, -0.049924086779356, -0.003958818502724171, -0.08611802011728287, -0.13704228401184082, -0.038258206099271774, 0.13484643399715424, 0.05106847360730171, 0.008050400763750076, 0.011317828670144081, 0.0406946986913681, -0.0009070236701518297, -0.08797939121723175, 0.038835473358631134, 0.07860208302736282, 0.09397588670253754, 0.025309212505817413, -0.047368526458740234, -0.15147432684898376, -0.1113133355975151, -0.0882052555680275, 0.0708179771900177, 0.13905061781406403, -0.054761484265327454, 0.13968655467033386, 0.11966510117053986, -0.10581393539905548, -0.18252824246883392, -0.05301748588681221, 0.022319188341498375, -0.009505752474069595, 0.1061905100941658, -0.19982771575450897, 0.07010437548160553, 0.06075295805931091, -0.001802288112230599, 
0.09681839495897293, -0.22445936501026154, -0.13390636444091797, 0.048567481338977814, 0.023256808519363403, -0.22078338265419006, -0.14640362560749054, -0.10740959644317627, -0.017689695581793785, -0.14838732779026031, 0.14217106997966766, 0.005797624122351408, 0.036508385092020035, -0.008982605300843716, 0.07504759728908539, 0.05066344141960144, -0.05478142201900482, 0.11926059424877167, 0.001698217005468905, 0.026608923450112343, -0.08922077715396881, -0.03301258012652397, 0.02389177493751049, -0.05310001224279404, 0.09661884605884552, 0.03660273179411888, 0.05124243348836899, -0.09133369475603104, -0.030333111062645912, -0.04654664173722267, 0.06957083195447922, -0.0703173354268074, -0.061344970017671585, -0.052956584841012955, 0.07876868546009064, 0.07696499675512314, -0.028137510642409325, 0.03132859990000725, -0.02205522730946541, 0.02668466977775097, 0.21506424248218536, 0.09671740978956223, 0.02176983840763569, -0.10428104549646378, -0.015541652217507362, -0.004946708679199219, -0.001054614083841443, -0.13893574476242065, 0.04439624771475792, 0.09539569914340973, 0.04478369653224945, 0.07741425186395645, -0.026331493631005287, -0.18718142807483673, 0.00027319835498929024, 0.07846350967884064, -0.09504300355911255, -0.18442875146865845, 0.02729487605392933, 0.11880284547805786, -0.13523025810718536, -0.052507705986499786, 0.09189408272504807, 0.021671460941433907, -0.03071623481810093, 0.005614930763840675, 0.07247357070446014, 0.05321769788861275, 0.11244911700487137, 0.008804393000900745, 0.04515276849269867, -0.07856559753417969, 0.10125880688428879, 0.14184355735778809, -0.08365856856107712, 0.03294307738542557, 0.07005304098129272, -0.05250855162739754, -0.05416823551058769, 0.03787414729595184, -0.0005128236953169107, 0.022618981078267097, -0.0437810979783535, 0.030620142817497253, -0.02917521633207798, 0.05850457027554512, 0.1120029166340828, -0.0016342136077582836, 0.035249385982751846, 0.01946764998137951, -0.014564643613994122, 
-0.07494457066059113, 0.11947926133871078, 0.05323044955730438, 0.036885034292936325, -0.04904157668352127, 0.028561512008309364, 0.017943700775504112, -0.0031680045649409294, 0.015526124276220798, -0.04453214630484581, -0.03516891598701477, -0.011489619500935078, -0.14727474749088287, 0.027843007817864418, -0.0889490395784378, -0.004922778811305761, -0.005692508537322283, -0.023747161030769348, -0.028875403106212616, 0.02846083976328373, -0.05118168890476227, -0.07110297679901123, -0.05011051520705223, 0.10911664366722107, -0.20416682958602905, -0.007310803048312664, 0.08631113171577454, -0.07798026502132416, 0.07355421781539917, 0.0067212507128715515, -0.016642794013023376, 0.006449210457503796, -0.10755769163370132, -0.031579334288835526, -0.021064244210720062, 0.0686296746134758, 0.033116258680820465, -0.16384436190128326, -0.00871257297694683, 0.01601213961839676, -0.06946365535259247, -0.023249397054314613, 0.058696381747722626, -0.15165287256240845, 0.03179610148072243, 0.0480138324201107, -0.04841557890176773, -0.032379232347011566, 0.05586263909935951, 0.05306137725710869, 0.0015062877209857106, 0.10780677944421768, 0.0004930595750920475, 0.037922248244285583, -0.16351929306983948, -0.03914701193571091, -0.004632506053894758, 0.007733526639640331, 0.0011039513628929853, 0.026581306010484695, 0.047121286392211914, -0.01325166691094637, 0.19343812763690948, -0.003996055107563734, 0.06294871866703033, 0.035171736031770706, 0.011167855933308601, -0.07870616018772125, 0.02234337478876114, 0.043171174824237823, -0.0049707405269145966, 0.022409208118915558, 0.027473971247673035, -0.036081667989492416, -0.03448018431663513, -0.045229118317365646, 0.08424528688192368, 0.14349353313446045, 0.16268308460712433, -0.028979266062378883, 0.07652320712804794, -0.1607298105955124, -0.07353732734918594, 0.034347616136074066, -0.04473494738340378, 0.044135525822639465, -0.0617392472922802, 0.040397319942712784, 0.07569365948438644, -0.12006113678216934, 0.13187159597873688, 
-0.06229911744594574, -0.05151136964559555, -0.03211306780576706, -0.14116200804710388, -0.052644629031419754, 0.012678975239396095, 0.010423957370221615, -0.11246024817228317, 0.1068124771118164, 0.10635984688997269, -0.009142307564616203, -0.01627778820693493, 0.09675153344869614, -0.06001663953065872, -0.07722228020429611, -0.02989882603287697, 0.023565813899040222, 0.02200850285589695, -0.0117492051795125, 0.09240071475505829, 0.007307013496756554, 0.07942808419466019, 0.07038719207048416, 0.09266802668571472, 0.07823009788990021, 0.023409007117152214, -0.04676700010895729, -0.05594059079885483, -0.006330124102532864, -0.013677356764674187, -0.05058664083480835, 0.16271936893463135, 0.049180932343006134, 0.008738786913454533, 0.016886843368411064, 0.2128472775220871, 0.0017867538845166564, -0.0877825990319252, -0.14250332117080688, 0.09296265989542007, 0.008715607225894928, 0.015461225062608719, 0.04230175167322159, -0.13464760780334473, 0.03327851742506027, 0.16875101625919342, 0.09836453944444656, 0.0354590117931366, 0.0038123703561723232, 0.02614441327750683, 0.023638570681214333, -0.05167454108595848, 0.04435587674379349, 0.04558464139699936, 0.16765239834785461, -0.05118595436215401, 0.05477936938405037, -0.027699913829565048, -0.021827595308423042, -0.031054474413394928, 0.0808335542678833, -0.04491548612713814, 0.012582923285663128, -0.046384915709495544, 0.10002326965332031, -0.034480150789022446, -0.28496184945106506, -0.017049448564648628, -0.08755393326282501, -0.13504092395305634, -0.018107278272509575, 0.052689965814352036, -0.023441391065716743, 0.03114589862525463, 0.0398096889257431, -0.01728389598429203, 0.2086586356163025, 0.01688074879348278, -0.07502062618732452, -0.044201258569955826, 0.08366519957780838, -0.05395565927028656, 0.24872718751430511, 0.0005594475660473108, 0.05440814048051834, 0.0972815603017807, -0.020539043471217155, -0.16634032130241394, 0.02906651422381401, 0.10211561620235443, -0.014795360155403614, 0.07213353365659714, 
0.1444970816373825, -0.017784258350729942, 0.12651678919792175, 0.05425047501921654, -0.0020968003664165735, 0.0352218933403492, 0.07084781676530838, 0.03554070368409157, -0.09024055302143097, 0.07475022971630096, -0.08328552544116974, 0.13314150273799896, 0.11427383869886398, -0.03086313046514988, -0.0037506178487092257, -0.08092199265956879, 0.06486321240663528, -0.02874920703470707, 0.12029867619276047, -0.005708007607609034, -0.16513121128082275, 0.0476282462477684, 0.03984518721699715, 0.06520215421915054, -0.19882239401340485, -0.06720305234193802, 0.10864121466875076, -0.05429081618785858, -0.009478474035859108, 0.10917928069829941, 0.04372784495353699, 0.017902646213769913, -0.06306306272745132, -0.05411697179079056, -0.006698231678456068, 0.11037716269493103, -0.07634510844945908, -0.02604941464960575 ]
ff2820bc67ff5fe6e1fc1aa224424cf50a7e0687
# Dataset Card for Evaluation run of TeeZee/Xwin-LM-70B-V0.1_Jannie <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [TeeZee/Xwin-LM-70B-V0.1_Jannie](https://huggingface.co/TeeZee/Xwin-LM-70B-V0.1_Jannie) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TeeZee__Xwin-LM-70B-V0.1_Jannie", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T10:59:59.024492](https://huggingface.co/datasets/open-llm-leaderboard/details_TeeZee__Xwin-LM-70B-V0.1_Jannie/blob/main/results_2024-01-25T10-59-59.024492.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6920145646239103, "acc_stderr": 0.03050636265571641, "acc_norm": 0.6988351160849339, "acc_norm_stderr": 0.031092908305197647, "mc1": 0.40514075887392903, "mc1_stderr": 0.017185611727753368, "mc2": 0.6014420746258905, "mc2_stderr": 0.015084429602270856 }, "harness|arc:challenge|25": { "acc": 0.6629692832764505, "acc_stderr": 0.013813476652902276, "acc_norm": 0.71160409556314, "acc_norm_stderr": 0.013238394422428178 }, "harness|hellaswag|10": { "acc": 0.6803425612427804, "acc_stderr": 0.004653907471785645, "acc_norm": 0.8685520812587134, "acc_norm_stderr": 0.0033719902188524583 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6370370370370371, "acc_stderr": 0.04153948404742399, "acc_norm": 0.6370370370370371, "acc_norm_stderr": 0.04153948404742399 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8157894736842105, "acc_stderr": 0.0315469804508223, "acc_norm": 0.8157894736842105, "acc_norm_stderr": 0.0315469804508223 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.73, "acc_stderr": 0.04461960433384741, "acc_norm": 0.73, "acc_norm_stderr": 0.04461960433384741 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7056603773584905, "acc_stderr": 0.02804918631569525, "acc_norm": 0.7056603773584905, "acc_norm_stderr": 0.02804918631569525 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8263888888888888, "acc_stderr": 0.03167473383795718, "acc_norm": 0.8263888888888888, "acc_norm_stderr": 0.03167473383795718 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 
0.049756985195624284 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6589595375722543, "acc_stderr": 0.036146654241808254, "acc_norm": 0.6589595375722543, "acc_norm_stderr": 0.036146654241808254 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.048971049527263666, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.048971049527263666 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.8, "acc_stderr": 0.04020151261036846, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6851063829787234, "acc_stderr": 0.030363582197238167, "acc_norm": 0.6851063829787234, "acc_norm_stderr": 0.030363582197238167 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4298245614035088, "acc_stderr": 0.046570472605949625, "acc_norm": 0.4298245614035088, "acc_norm_stderr": 0.046570472605949625 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6137931034482759, "acc_stderr": 0.04057324734419035, "acc_norm": 0.6137931034482759, "acc_norm_stderr": 0.04057324734419035 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4497354497354497, "acc_stderr": 0.025620857042936655, "acc_norm": 0.4497354497354497, "acc_norm_stderr": 0.025620857042936655 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4523809523809524, "acc_stderr": 0.044518079590553275, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.044518079590553275 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8193548387096774, "acc_stderr": 0.021886178567172527, "acc_norm": 0.8193548387096774, "acc_norm_stderr": 0.021886178567172527 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5517241379310345, "acc_stderr": 0.03499113137676744, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.03499113137676744 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8242424242424242, "acc_stderr": 0.02972094300622445, "acc_norm": 0.8242424242424242, "acc_norm_stderr": 0.02972094300622445 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8888888888888888, "acc_stderr": 0.02239078763821677, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.02239078763821677 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9430051813471503, "acc_stderr": 0.01673108529360755, "acc_norm": 0.9430051813471503, "acc_norm_stderr": 0.01673108529360755 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7205128205128205, "acc_stderr": 0.022752388839776826, "acc_norm": 0.7205128205128205, "acc_norm_stderr": 0.022752388839776826 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.029116617606083015, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.029116617606083015 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7478991596638656, "acc_stderr": 0.028205545033277723, "acc_norm": 0.7478991596638656, "acc_norm_stderr": 0.028205545033277723 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.46357615894039733, "acc_stderr": 0.04071636065944215, "acc_norm": 0.46357615894039733, "acc_norm_stderr": 0.04071636065944215 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8825688073394495, "acc_stderr": 0.013802780227377352, "acc_norm": 0.8825688073394495, "acc_norm_stderr": 0.013802780227377352 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5462962962962963, "acc_stderr": 
0.033953227263757976, "acc_norm": 0.5462962962962963, "acc_norm_stderr": 0.033953227263757976 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9215686274509803, "acc_stderr": 0.018869514646658928, "acc_norm": 0.9215686274509803, "acc_norm_stderr": 0.018869514646658928 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.9029535864978903, "acc_stderr": 0.019269323025640266, "acc_norm": 0.9029535864978903, "acc_norm_stderr": 0.019269323025640266 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7892376681614349, "acc_stderr": 0.02737309550054019, "acc_norm": 0.7892376681614349, "acc_norm_stderr": 0.02737309550054019 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8473282442748091, "acc_stderr": 0.031545216720054725, "acc_norm": 0.8473282442748091, "acc_norm_stderr": 0.031545216720054725 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8512396694214877, "acc_stderr": 0.032484700838071943, "acc_norm": 0.8512396694214877, "acc_norm_stderr": 0.032484700838071943 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8148148148148148, "acc_stderr": 0.03755265865037181, "acc_norm": 0.8148148148148148, "acc_norm_stderr": 0.03755265865037181 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8159509202453987, "acc_stderr": 0.03044677768797173, "acc_norm": 0.8159509202453987, "acc_norm_stderr": 0.03044677768797173 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.48214285714285715, "acc_stderr": 0.047427623612430116, "acc_norm": 0.48214285714285715, "acc_norm_stderr": 0.047427623612430116 }, "harness|hendrycksTest-management|5": { "acc": 0.8058252427184466, "acc_stderr": 0.03916667762822582, "acc_norm": 0.8058252427184466, "acc_norm_stderr": 0.03916667762822582 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8846153846153846, "acc_stderr": 0.02093019318517933, "acc_norm": 0.8846153846153846, "acc_norm_stderr": 0.02093019318517933 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 
0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8659003831417624, "acc_stderr": 0.012185528166499978, "acc_norm": 0.8659003831417624, "acc_norm_stderr": 0.012185528166499978 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7861271676300579, "acc_stderr": 0.022075709251757177, "acc_norm": 0.7861271676300579, "acc_norm_stderr": 0.022075709251757177 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.5865921787709497, "acc_stderr": 0.016469814928406164, "acc_norm": 0.5865921787709497, "acc_norm_stderr": 0.016469814928406164 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7450980392156863, "acc_stderr": 0.024954184324879905, "acc_norm": 0.7450980392156863, "acc_norm_stderr": 0.024954184324879905 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7684887459807074, "acc_stderr": 0.023956532766639133, "acc_norm": 0.7684887459807074, "acc_norm_stderr": 0.023956532766639133 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8271604938271605, "acc_stderr": 0.021038517770157375, "acc_norm": 0.8271604938271605, "acc_norm_stderr": 0.021038517770157375 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5212765957446809, "acc_stderr": 0.029800481645628693, "acc_norm": 0.5212765957446809, "acc_norm_stderr": 0.029800481645628693 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.546284224250326, "acc_stderr": 0.012715404841277752, "acc_norm": 0.546284224250326, "acc_norm_stderr": 0.012715404841277752 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7132352941176471, "acc_stderr": 0.027472274473233818, "acc_norm": 0.7132352941176471, "acc_norm_stderr": 0.027472274473233818 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7565359477124183, "acc_stderr": 0.017362473762146616, "acc_norm": 0.7565359477124183, "acc_norm_stderr": 0.017362473762146616 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7090909090909091, "acc_stderr": 
0.04350271442923243, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 0.04350271442923243 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8, "acc_stderr": 0.02560737598657916, "acc_norm": 0.8, "acc_norm_stderr": 0.02560737598657916 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8756218905472637, "acc_stderr": 0.023335401790166327, "acc_norm": 0.8756218905472637, "acc_norm_stderr": 0.023335401790166327 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.92, "acc_stderr": 0.0272659924344291, "acc_norm": 0.92, "acc_norm_stderr": 0.0272659924344291 }, "harness|hendrycksTest-virology|5": { "acc": 0.5421686746987951, "acc_stderr": 0.0387862677100236, "acc_norm": 0.5421686746987951, "acc_norm_stderr": 0.0387862677100236 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8654970760233918, "acc_stderr": 0.0261682213446623, "acc_norm": 0.8654970760233918, "acc_norm_stderr": 0.0261682213446623 }, "harness|truthfulqa:mc|0": { "mc1": 0.40514075887392903, "mc1_stderr": 0.017185611727753368, "mc2": 0.6014420746258905, "mc2_stderr": 0.015084429602270856 }, "harness|winogrande|5": { "acc": 0.8105761641673244, "acc_stderr": 0.011012790432989248 }, "harness|gsm8k|5": { "acc": 0.40788476118271416, "acc_stderr": 0.013536742075643085 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_TeeZee__Xwin-LM-70B-V0.1_Jannie
[ "region:us" ]
2024-01-25T11:02:25+00:00
{"pretty_name": "Evaluation run of TeeZee/Xwin-LM-70B-V0.1_Jannie", "dataset_summary": "Dataset automatically created during the evaluation run of model [TeeZee/Xwin-LM-70B-V0.1_Jannie](https://huggingface.co/TeeZee/Xwin-LM-70B-V0.1_Jannie) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TeeZee__Xwin-LM-70B-V0.1_Jannie\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T10:59:59.024492](https://huggingface.co/datasets/open-llm-leaderboard/details_TeeZee__Xwin-LM-70B-V0.1_Jannie/blob/main/results_2024-01-25T10-59-59.024492.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6920145646239103,\n \"acc_stderr\": 0.03050636265571641,\n \"acc_norm\": 0.6988351160849339,\n \"acc_norm_stderr\": 0.031092908305197647,\n \"mc1\": 0.40514075887392903,\n \"mc1_stderr\": 0.017185611727753368,\n \"mc2\": 0.6014420746258905,\n \"mc2_stderr\": 0.015084429602270856\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6629692832764505,\n \"acc_stderr\": 0.013813476652902276,\n \"acc_norm\": 0.71160409556314,\n \"acc_norm_stderr\": 0.013238394422428178\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6803425612427804,\n \"acc_stderr\": 0.004653907471785645,\n \"acc_norm\": 0.8685520812587134,\n \"acc_norm_stderr\": 0.0033719902188524583\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6370370370370371,\n \"acc_stderr\": 0.04153948404742399,\n \"acc_norm\": 0.6370370370370371,\n \"acc_norm_stderr\": 0.04153948404742399\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.8157894736842105,\n \"acc_stderr\": 0.0315469804508223,\n \"acc_norm\": 0.8157894736842105,\n \"acc_norm_stderr\": 0.0315469804508223\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.04461960433384741,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.04461960433384741\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7056603773584905,\n \"acc_stderr\": 0.02804918631569525,\n \"acc_norm\": 0.7056603773584905,\n \"acc_norm_stderr\": 0.02804918631569525\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8263888888888888,\n \"acc_stderr\": 0.03167473383795718,\n \"acc_norm\": 0.8263888888888888,\n \"acc_norm_stderr\": 0.03167473383795718\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n 
\"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6589595375722543,\n \"acc_stderr\": 0.036146654241808254,\n \"acc_norm\": 0.6589595375722543,\n \"acc_norm_stderr\": 0.036146654241808254\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6851063829787234,\n \"acc_stderr\": 0.030363582197238167,\n \"acc_norm\": 0.6851063829787234,\n \"acc_norm_stderr\": 0.030363582197238167\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4298245614035088,\n \"acc_stderr\": 0.046570472605949625,\n \"acc_norm\": 0.4298245614035088,\n \"acc_norm_stderr\": 0.046570472605949625\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6137931034482759,\n \"acc_stderr\": 0.04057324734419035,\n \"acc_norm\": 0.6137931034482759,\n \"acc_norm_stderr\": 0.04057324734419035\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4497354497354497,\n \"acc_stderr\": 0.025620857042936655,\n \"acc_norm\": 0.4497354497354497,\n \"acc_norm_stderr\": 0.025620857042936655\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4523809523809524,\n \"acc_stderr\": 
0.044518079590553275,\n \"acc_norm\": 0.4523809523809524,\n \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8193548387096774,\n \"acc_stderr\": 0.021886178567172527,\n \"acc_norm\": 0.8193548387096774,\n \"acc_norm_stderr\": 0.021886178567172527\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.03499113137676744,\n \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.03499113137676744\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8242424242424242,\n \"acc_stderr\": 0.02972094300622445,\n \"acc_norm\": 0.8242424242424242,\n \"acc_norm_stderr\": 0.02972094300622445\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.02239078763821677,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.02239078763821677\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9430051813471503,\n \"acc_stderr\": 0.01673108529360755,\n \"acc_norm\": 0.9430051813471503,\n \"acc_norm_stderr\": 0.01673108529360755\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.7205128205128205,\n \"acc_stderr\": 0.022752388839776826,\n \"acc_norm\": 0.7205128205128205,\n \"acc_norm_stderr\": 0.022752388839776826\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35185185185185186,\n \"acc_stderr\": 0.029116617606083015,\n \"acc_norm\": 0.35185185185185186,\n \"acc_norm_stderr\": 0.029116617606083015\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7478991596638656,\n \"acc_stderr\": 0.028205545033277723,\n \"acc_norm\": 0.7478991596638656,\n \"acc_norm_stderr\": 0.028205545033277723\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.46357615894039733,\n \"acc_stderr\": 0.04071636065944215,\n \"acc_norm\": 0.46357615894039733,\n \"acc_norm_stderr\": 0.04071636065944215\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8825688073394495,\n \"acc_stderr\": 0.013802780227377352,\n \"acc_norm\": 0.8825688073394495,\n \"acc_norm_stderr\": 0.013802780227377352\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5462962962962963,\n \"acc_stderr\": 0.033953227263757976,\n \"acc_norm\": 0.5462962962962963,\n \"acc_norm_stderr\": 0.033953227263757976\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9215686274509803,\n \"acc_stderr\": 0.018869514646658928,\n \"acc_norm\": 0.9215686274509803,\n \"acc_norm_stderr\": 0.018869514646658928\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.9029535864978903,\n \"acc_stderr\": 0.019269323025640266,\n \"acc_norm\": 0.9029535864978903,\n \"acc_norm_stderr\": 0.019269323025640266\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7892376681614349,\n \"acc_stderr\": 0.02737309550054019,\n \"acc_norm\": 0.7892376681614349,\n \"acc_norm_stderr\": 0.02737309550054019\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8473282442748091,\n \"acc_stderr\": 0.031545216720054725,\n \"acc_norm\": 0.8473282442748091,\n \"acc_norm_stderr\": 0.031545216720054725\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8512396694214877,\n \"acc_stderr\": 0.032484700838071943,\n \"acc_norm\": 0.8512396694214877,\n \"acc_norm_stderr\": 0.032484700838071943\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8148148148148148,\n \"acc_stderr\": 
0.03755265865037181,\n \"acc_norm\": 0.8148148148148148,\n \"acc_norm_stderr\": 0.03755265865037181\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8159509202453987,\n \"acc_stderr\": 0.03044677768797173,\n \"acc_norm\": 0.8159509202453987,\n \"acc_norm_stderr\": 0.03044677768797173\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.48214285714285715,\n \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.48214285714285715,\n \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8058252427184466,\n \"acc_stderr\": 0.03916667762822582,\n \"acc_norm\": 0.8058252427184466,\n \"acc_norm_stderr\": 0.03916667762822582\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8846153846153846,\n \"acc_stderr\": 0.02093019318517933,\n \"acc_norm\": 0.8846153846153846,\n \"acc_norm_stderr\": 0.02093019318517933\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8659003831417624,\n \"acc_stderr\": 0.012185528166499978,\n \"acc_norm\": 0.8659003831417624,\n \"acc_norm_stderr\": 0.012185528166499978\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7861271676300579,\n \"acc_stderr\": 0.022075709251757177,\n \"acc_norm\": 0.7861271676300579,\n \"acc_norm_stderr\": 0.022075709251757177\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.5865921787709497,\n \"acc_stderr\": 0.016469814928406164,\n \"acc_norm\": 0.5865921787709497,\n \"acc_norm_stderr\": 0.016469814928406164\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7450980392156863,\n \"acc_stderr\": 0.024954184324879905,\n \"acc_norm\": 0.7450980392156863,\n \"acc_norm_stderr\": 0.024954184324879905\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7684887459807074,\n \"acc_stderr\": 
0.023956532766639133,\n \"acc_norm\": 0.7684887459807074,\n \"acc_norm_stderr\": 0.023956532766639133\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8271604938271605,\n \"acc_stderr\": 0.021038517770157375,\n \"acc_norm\": 0.8271604938271605,\n \"acc_norm_stderr\": 0.021038517770157375\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5212765957446809,\n \"acc_stderr\": 0.029800481645628693,\n \"acc_norm\": 0.5212765957446809,\n \"acc_norm_stderr\": 0.029800481645628693\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.546284224250326,\n \"acc_stderr\": 0.012715404841277752,\n \"acc_norm\": 0.546284224250326,\n \"acc_norm_stderr\": 0.012715404841277752\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7132352941176471,\n \"acc_stderr\": 0.027472274473233818,\n \"acc_norm\": 0.7132352941176471,\n \"acc_norm_stderr\": 0.027472274473233818\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7565359477124183,\n \"acc_stderr\": 0.017362473762146616,\n \"acc_norm\": 0.7565359477124183,\n \"acc_norm_stderr\": 0.017362473762146616\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7090909090909091,\n \"acc_stderr\": 0.04350271442923243,\n \"acc_norm\": 0.7090909090909091,\n \"acc_norm_stderr\": 0.04350271442923243\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.02560737598657916,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.02560737598657916\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8756218905472637,\n \"acc_stderr\": 0.023335401790166327,\n \"acc_norm\": 0.8756218905472637,\n \"acc_norm_stderr\": 0.023335401790166327\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.92,\n \"acc_stderr\": 0.0272659924344291,\n \"acc_norm\": 0.92,\n \"acc_norm_stderr\": 0.0272659924344291\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5421686746987951,\n \"acc_stderr\": 
0.0387862677100236,\n \"acc_norm\": 0.5421686746987951,\n \"acc_norm_stderr\": 0.0387862677100236\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8654970760233918,\n \"acc_stderr\": 0.0261682213446623,\n \"acc_norm\": 0.8654970760233918,\n \"acc_norm_stderr\": 0.0261682213446623\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.40514075887392903,\n \"mc1_stderr\": 0.017185611727753368,\n \"mc2\": 0.6014420746258905,\n \"mc2_stderr\": 0.015084429602270856\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8105761641673244,\n \"acc_stderr\": 0.011012790432989248\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.40788476118271416,\n \"acc_stderr\": 0.013536742075643085\n }\n}\n```", "repo_url": "https://huggingface.co/TeeZee/Xwin-LM-70B-V0.1_Jannie", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|arc:challenge|25_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|gsm8k|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hellaswag|10_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T10-59-59.024492.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T10-59-59.024492.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T10-59-59.024492.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T10-59-59.024492.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T10-59-59.024492.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T10-59-59.024492.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T10-59-59.024492.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": 
"2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": 
"2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T10-59-59.024492.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["**/details_harness|winogrande|5_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T10-59-59.024492.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T10_59_59.024492", "path": ["results_2024-01-25T10-59-59.024492.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T10-59-59.024492.parquet"]}]}]}
2024-01-25T11:02:54+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of TeeZee/Xwin-LM-70B-V0.1_Jannie Dataset automatically created during the evaluation run of model TeeZee/Xwin-LM-70B-V0.1_Jannie on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T10:59:59.024492 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of TeeZee/Xwin-LM-70B-V0.1_Jannie\n\n\n\nDataset automatically created during the evaluation run of model TeeZee/Xwin-LM-70B-V0.1_Jannie on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T10:59:59.024492(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of TeeZee/Xwin-LM-70B-V0.1_Jannie\n\n\n\nDataset automatically created during the evaluation run of model TeeZee/Xwin-LM-70B-V0.1_Jannie on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T10:59:59.024492(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 193, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of TeeZee/Xwin-LM-70B-V0.1_Jannie\n\n\n\nDataset automatically created during the evaluation run of model TeeZee/Xwin-LM-70B-V0.1_Jannie on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T10:59:59.024492(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
[ -0.05218498408794403, 0.21819724142551422, -0.0042756423354148865, 0.035319723188877106, 0.08579106628894806, -0.005146522540599108, 0.008566220290958881, 0.12256090342998505, -0.0019778411369770765, 0.17316368222236633, -0.005717667285352945, 0.09759427607059479, 0.08308762311935425, 0.13013923168182373, 0.029168158769607544, -0.1363486498594284, 0.011037920601665974, -0.06426212191581726, 0.0700102299451828, 0.08481498062610626, 0.088210329413414, -0.07672879099845886, 0.05897928774356842, -0.052865900099277496, -0.012519333511590958, 0.01303272694349289, -0.09313352406024933, -0.03920891880989075, 0.09313568472862244, 0.09221313893795013, 0.0271134115755558, -0.012586487457156181, 0.024609040468931198, -0.2606595456600189, 0.012271841987967491, 0.09309456497430801, -0.006250456906855106, 0.03665592893958092, 0.11518286168575287, -0.07750500738620758, 0.04276129603385925, -0.10413183271884918, 0.06817828863859177, 0.042161643505096436, -0.11800957471132278, -0.14801061153411865, -0.1344648003578186, 0.020070303231477737, 0.06450385600328445, 0.04969734698534012, -0.026402547955513, 0.15807610750198364, -0.023663083091378212, 0.051709048449993134, 0.12937119603157043, -0.10400091111660004, -0.020123742520809174, 0.04025384783744812, 0.046824317425489426, 0.07227024435997009, -0.09253229200839996, 0.0022550830617547035, 0.041012056171894073, 0.044230952858924866, 0.022204609587788582, -0.0017576571553945541, -0.06583835184574127, 0.01902490109205246, -0.1321401596069336, -0.11591994762420654, 0.19151835143566132, 0.007674960885196924, -0.038041651248931885, -0.16468198597431183, -0.024412892758846283, -0.0007647592574357986, -0.002828999888151884, -0.04584181308746338, 0.009216271340847015, -0.02709444798529148, 0.0700632631778717, -0.031117398291826248, -0.09615689516067505, -0.011994335800409317, 0.014094248414039612, 0.039594866335392, 0.01775972917675972, -0.021022701635956764, -0.0018974049016833305, 0.1131644994020462, -0.023608187213540077, 
-0.09079965949058533, -0.07452797144651413, -0.04273790866136551, -0.12489300221204758, -0.047054607421159744, 0.020864158868789673, -0.058619894087314606, 0.029409725219011307, 0.22037334740161896, -0.04039721563458443, 0.037797197699546814, -0.09610038250684738, 0.00043697154615074396, 0.12076140940189362, 0.060729123651981354, -0.06837455183267593, -0.09233139455318451, -0.017598962411284447, 0.008276570588350296, 0.030683904886245728, -0.020970933139324188, 0.005843187682330608, 0.06496365368366241, 0.06347648054361343, 0.12409740686416626, 0.12480340898036957, 0.02409174293279648, -0.058922119438648224, -0.02078329399228096, 0.2116374522447586, -0.1484149694442749, 0.00022348714992403984, -0.002997715724632144, -0.03252684324979782, -0.11185798048973083, 0.07778908312320709, 0.0038999109528958797, -0.05883927643299103, 0.1067252904176712, -0.050431013107299805, -0.08037732541561127, -0.06412327289581299, -0.05826598405838013, 0.06844758987426758, -0.024507611989974976, -0.022678878158330917, -0.07413361966609955, -0.11882860958576202, -0.07594127953052521, 0.01974235288798809, -0.06706605851650238, -0.037045758217573166, 0.03903624415397644, -0.018396858125925064, -0.016029931604862213, -0.01746080443263054, 0.1329241245985031, -0.05005396902561188, 0.03357233107089996, 0.005995950661599636, 0.009886364452540874, 0.07952293753623962, 0.04679670184850693, -0.1264897882938385, 0.0819651335477829, -0.10289302468299866, 0.10206955671310425, -0.10805109143257141, -0.012149346061050892, -0.14872030913829803, -0.010113950818777084, -0.025978265330195427, 0.012233322486281395, 0.005513936281204224, 0.09315556287765503, -0.24394184350967407, 0.01669752597808838, 0.11839281022548676, -0.10112738609313965, -0.10450847446918488, 0.07637839764356613, -0.040120914578437805, 0.09650829434394836, 0.05959402769804001, 0.11079917103052139, 0.11991201341152191, -0.09516822546720505, -0.11338852345943451, -0.07043999433517456, -0.03049595095217228, 0.14591410756111145, 
0.05656613036990166, -0.05443442240357399, 0.11398421227931976, 0.04413996636867523, -0.01950790546834469, -0.09905916452407837, -0.015575256198644638, -0.06747320294380188, -0.026861973106861115, -0.048468515276908875, -0.07879379391670227, 0.009287502616643906, -0.08841122686862946, -0.03136478364467621, -0.09817037731409073, 0.030538903549313545, 0.08755463361740112, -0.021414071321487427, 0.01910737156867981, -0.05804663151502609, 0.035735487937927246, 0.0005646374775096774, 0.025481093674898148, -0.21118709444999695, -0.10488177835941315, 0.03970146179199219, -0.1370113343000412, 0.06096704304218292, 0.03938070684671402, 0.01125213224440813, 0.042382314801216125, -0.018207550048828125, 0.015821002423763275, 0.015017509460449219, 0.004439851734787226, -0.015077407471835613, -0.14700466394424438, -0.05847188085317612, -0.07914867252111435, 0.09789296984672546, -0.13027995824813843, -0.020804625004529953, 0.0787581354379654, 0.15270668268203735, -0.002448311075568199, -0.07438129186630249, 0.07377050071954727, 0.002203194657340646, -0.03352183848619461, -0.06730575859546661, 0.0033666302915662527, -0.022685639560222626, 0.04478742182254791, 0.027232278138399124, -0.19859415292739868, -0.14791688323020935, 0.07710389792919159, 0.13027052581310272, -0.07935338467359543, -0.057111017405986786, -0.06011650338768959, -0.06702515482902527, -0.08333351463079453, -0.07895234227180481, 0.07392945885658264, 0.07161512225866318, 0.03094961866736412, -0.07492213696241379, -0.0738149881362915, 0.011190381832420826, 0.048949576914310455, -0.07437325268983841, 0.10238054394721985, 0.06870323419570923, -0.11228290945291519, 0.10046949237585068, -0.004671117290854454, 0.1322447657585144, 0.07215672731399536, 0.028556717559695244, -0.09024196863174438, -0.012343218550086021, 0.04339628666639328, 0.029655076563358307, 0.08918781578540802, -0.04522475227713585, 0.03305633366107941, 0.07780006527900696, -0.014874313957989216, 0.042798444628715515, -0.050068214535713196, 
0.029011091217398643, 0.044824548065662384, -0.0015616625314578414, 0.022949811071157455, 0.0017277784645557404, 0.0012633409351110458, 0.06446821987628937, 0.02588288113474846, 0.11594602465629578, -0.015480583533644676, -0.035469599068164825, -0.10162067413330078, 0.12964913249015808, -0.09618600457906723, -0.2992134392261505, -0.14177173376083374, -0.034549836069345474, -0.029368769377470016, -0.023490531370043755, 0.07335732132196426, -0.01789912022650242, -0.09590879082679749, -0.09917886555194855, 0.04077110439538956, -0.004138635471463203, -0.12865442037582397, -0.0682518407702446, 0.058828480541706085, 0.0031141480430960655, -0.17325833439826965, 0.04359427094459534, 0.04709814488887787, -0.037884511053562164, 0.005908061284571886, 0.08498973399400711, 0.1493486762046814, 0.06978344917297363, 0.042924605309963226, -0.022629063576459885, -0.013306748121976852, 0.1967049390077591, -0.10903558135032654, 0.05304209142923355, 0.11357969045639038, -0.04092793166637421, 0.06784965097904205, 0.17283570766448975, -0.001344291027635336, -0.10243979096412659, 0.0380670428276062, 0.09035709500312805, -0.07493234425783157, -0.2563534379005432, -0.10061562061309814, -0.017979802563786507, -0.0068391915410757065, 0.10514212399721146, 0.06334827840328217, 0.01965327560901642, 0.017328409478068352, -0.11145897954702377, -0.01878732070326805, -0.06399524956941605, 0.078050896525383, 0.0566323883831501, -0.008204517886042595, 0.04606764018535614, -0.04213611036539078, 0.03709179908037186, 0.11103878915309906, 0.05020558461546898, 0.15178309381008148, -0.03261878341436386, 0.17678473889827728, 0.08336898684501648, 0.10476827621459961, -0.04883040487766266, 0.04795469343662262, 0.010157212615013123, 0.07468503713607788, -0.008446421474218369, -0.10270583629608154, -0.04287634789943695, 0.09475431591272354, 0.012489560060203075, -0.06929363310337067, 0.03164517134428024, -0.04786810278892517, 0.0318511426448822, 0.17766152322292328, -0.019044257700443268, -0.15081772208213806, 
-0.05508827418088913, 0.061976827681064606, -0.01288621686398983, -0.09927544742822647, -0.03803003206849098, 0.06684165447950363, -0.1465025544166565, 0.030113764107227325, -0.015695754438638687, 0.08553938567638397, -0.1185683161020279, -0.020583458244800568, -0.024857468903064728, 0.04937361925840378, -0.0015761100221425295, 0.1262451410293579, -0.13079270720481873, 0.11619525402784348, 0.012963988818228245, 0.03783077001571655, -0.10594211518764496, 0.04902641847729683, -0.05980049818754196, -0.03462597727775574, 0.16025501489639282, -0.008303281851112843, -0.09992370009422302, -0.05588832497596741, -0.10985998809337616, 0.005352635867893696, 0.07639851421117783, -0.10834185779094696, 0.10296940803527832, 0.03175249323248863, -0.02354324795305729, -0.017831165343523026, -0.01249774545431137, -0.14505866169929504, -0.22573786973953247, 0.1092236340045929, -0.1080406904220581, 0.05293036997318268, -0.04449835419654846, -0.036953724920749664, -0.05109812319278717, 0.18639807403087616, -0.06808972358703613, -0.06100790202617645, -0.12661293148994446, 0.035252511501312256, 0.1885286420583725, -0.05320950597524643, 0.04018870368599892, -0.04510621353983879, 0.18207895755767822, -0.009627310559153557, -0.050546564161777496, -0.009204929694533348, -0.0927891954779625, -0.1503096967935562, -0.04318910092115402, 0.1373627781867981, 0.05748918652534485, 0.018313482403755188, 0.018627673387527466, 0.043251752853393555, 0.011794516816735268, -0.08949460089206696, 0.038628265261650085, 0.07283397018909454, 0.10579559206962585, 0.016071660444140434, -0.04569435864686966, -0.08779603242874146, -0.10722599178552628, -0.09115508198738098, 0.08030705153942108, 0.13661617040634155, -0.06795181334018707, 0.1615106463432312, 0.14374837279319763, -0.11453209817409515, -0.19983330368995667, -0.04486091434955597, 0.029910219833254814, -0.0280655138194561, 0.10260476171970367, -0.19079169631004333, 0.07337499409914017, 0.05016017332673073, -0.00038473133463412523, 0.08348606526851654, 
-0.235521137714386, -0.1377783715724945, 0.021097790449857712, 0.024217382073402405, -0.23167580366134644, -0.17703008651733398, -0.10649769008159637, -0.0288835521787405, -0.1634541004896164, 0.1537817269563675, -0.02202286571264267, 0.029802989214658737, -0.00315976794809103, 0.06662959605455399, 0.05050171911716461, -0.05906510353088379, 0.13118036091327667, 0.008864335715770721, 0.01195861678570509, -0.09324911236763, -0.01579723134636879, 0.03868182376027107, -0.046904999762773514, 0.10704191029071808, 0.044447265565395355, 0.05154404044151306, -0.08632660657167435, -0.034791115671396255, -0.057505205273628235, 0.07101121544837952, -0.08092735707759857, -0.06719106435775757, -0.062183916568756104, 0.08107629418373108, 0.08502966165542603, -0.028420573100447655, 0.0011775456368923187, -0.032218120992183685, 0.0366264209151268, 0.20626401901245117, 0.10617296397686005, 0.03465240076184273, -0.12880301475524902, -0.005851924419403076, -0.016203632578253746, -0.005887500010430813, -0.14519447088241577, 0.03661779686808586, 0.08638646453619003, 0.0567404069006443, 0.07120630145072937, -0.03664492070674896, -0.19802698493003845, -0.01019284874200821, 0.06523558497428894, -0.10151959955692291, -0.20733097195625305, 0.026189569383859634, 0.1146063506603241, -0.1377461850643158, -0.07003161311149597, 0.09015540778636932, 0.014276489615440369, -0.031341392546892166, -0.0034349842462688684, 0.07195873558521271, 0.04920805245637894, 0.10779733955860138, 0.011735494248569012, 0.05363836884498596, -0.07958212494850159, 0.1020783856511116, 0.14851075410842896, -0.12673163414001465, 0.03977307677268982, 0.06188090145587921, -0.05229193717241287, -0.06777437031269073, 0.02585001289844513, 0.012301869690418243, 0.020275356248021126, -0.04999210685491562, 0.03333438187837601, -0.012684764340519905, 0.04562586545944214, 0.11292754113674164, 0.00048274535220116377, 0.025396928191184998, 0.03318466991186142, -0.018376657739281654, -0.0975942611694336, 0.10100466758012772, 
0.04141212999820709, 0.039757050573825836, -0.03512495011091232, 0.030629385262727737, 0.032266177237033844, 0.006717502139508724, 0.014470851048827171, -0.03608021140098572, -0.04427322745323181, -0.0063180578872561455, -0.14410239458084106, 0.025687282904982567, -0.06692565977573395, 0.0023218996357172728, -0.014584069140255451, -0.02802807092666626, -0.022266827523708344, 0.016108471900224686, -0.053771067410707474, -0.07074491679668427, -0.04397519677877426, 0.11537505686283112, -0.199948251247406, -0.014505811966955662, 0.09313660860061646, -0.06692436337471008, 0.06844177097082138, -0.0035621256101876497, -0.022077830508351326, 0.013948028907179832, -0.0770692229270935, -0.012177792377769947, -0.02127862721681595, 0.05469166859984398, 0.02136482298374176, -0.16616427898406982, -0.015973027795553207, 0.010360365733504295, -0.06998547911643982, -0.009388779290020466, 0.04920308291912079, -0.14889489114284515, 0.02524421364068985, 0.050931014120578766, -0.04506613686680794, -0.04028818756341934, 0.05280352383852005, 0.05042967200279236, 0.0028355205431580544, 0.09443937987089157, 0.0009603654034435749, 0.04926903173327446, -0.1636357605457306, -0.05440003424882889, -0.008758759126067162, 0.004964467603713274, 0.012529658153653145, 0.02824975550174713, 0.0424288772046566, -0.0024628513492643833, 0.20457112789154053, -0.007654879707843065, 0.09549148380756378, 0.03173655644059181, 0.00935676135122776, -0.05683063715696335, 0.01464594341814518, 0.041277892887592316, 0.0047814687713980675, 0.027205631136894226, 0.02974860370159149, -0.026027977466583252, -0.03766524791717529, -0.0426846407353878, 0.06490471959114075, 0.15475788712501526, 0.1777854859828949, -0.038776639848947525, 0.07510700076818466, -0.16787654161453247, -0.061322249472141266, 0.040902890264987946, -0.029836423695087433, 0.04551662504673004, -0.06727012246847153, 0.042500175535678864, 0.07821149379014969, -0.11326068639755249, 0.14167220890522003, -0.05976889654994011, -0.05080372095108032, 
-0.03194667026400566, -0.1341330111026764, -0.05141604691743851, 0.031063538044691086, 0.011523524299263954, -0.10499942302703857, 0.09889484196901321, 0.09827446937561035, -0.015837805345654488, -0.005292451940476894, 0.10328066349029541, -0.060470499098300934, -0.06728015094995499, -0.036324165761470795, 0.01572088897228241, 0.022131256759166718, -0.002778418594971299, 0.09016694128513336, 0.02261781319975853, 0.0864044725894928, 0.06246114894747734, 0.0989796593785286, 0.05422813072800636, 0.021507909521460533, -0.048494815826416016, -0.0640731230378151, -0.012340599671006203, 0.0005174384568817914, -0.036381326615810394, 0.19457726180553436, 0.04947846010327339, 0.01244319323450327, 0.004082709550857544, 0.20450171828269958, 0.01043331902474165, -0.08443805575370789, -0.12217427045106888, 0.10874123871326447, -0.006073194555938244, 0.01799589768052101, 0.03757137805223465, -0.1314992606639862, 0.03949848935008049, 0.18223753571510315, 0.11721889674663544, 0.05111485347151756, 0.00884409248828888, 0.02767772227525711, 0.026084251701831818, -0.03815131634473801, 0.038687124848365784, 0.042353611439466476, 0.17488306760787964, -0.07158418744802475, 0.06698288768529892, -0.020881982520222664, -0.02525658719241619, -0.03645363077521324, 0.09701275825500488, -0.03859851136803627, 0.012114685028791428, -0.03958036005496979, 0.1094692200422287, -0.030751612037420273, -0.2932615876197815, -0.01245542149990797, -0.10011405497789383, -0.12973658740520477, -0.015055321156978607, 0.050082284957170486, -0.030076317489147186, 0.03044651448726654, 0.037492141127586365, -0.01583770662546158, 0.19519102573394775, 0.015140862204134464, -0.08917294442653656, -0.05438022315502167, 0.07315323501825333, -0.02617308683693409, 0.2486879974603653, -0.007210440933704376, 0.07277581840753555, 0.09819571673870087, -0.02401488646864891, -0.1696462482213974, 0.016031013801693916, 0.11142405867576599, -0.031423185020685196, 0.06575115025043488, 0.16884300112724304, -0.031419143080711365, 
0.11717917025089264, 0.04907438904047012, -0.030780388042330742, 0.04753442108631134, 0.07286337018013, 0.04939787834882736, -0.09354700148105621, 0.08187659084796906, -0.0879121720790863, 0.14539489150047302, 0.10601402074098587, -0.032156892120838165, -0.0013181032845750451, -0.08628587424755096, 0.056514885276556015, -0.02295679785311222, 0.10477647185325623, 0.004809326492249966, -0.1614411473274231, 0.040834590792655945, 0.030339589342474937, 0.065792515873909, -0.22293353080749512, -0.0702369213104248, 0.13197802007198334, -0.04816822707653046, 0.004212331958115101, 0.09852199256420135, 0.04274073988199234, 0.008278220891952515, -0.059639666229486465, -0.09353971481323242, -0.0043807909823954105, 0.12139411270618439, -0.0965626984834671, -0.031740862876176834 ]
716eeb5e718457d7c1df21d7ccbbcadffe195e87
# Dataset Card for Dataset Name <!-- Provide a quick summary of the dataset. --> This dataset is currently incomplete; ongoing work. ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. 
It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. 
--> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
crina-t/UnioNER
[ "task_categories:token-classification", "language:sv", "region:us" ]
2024-01-25T11:04:27+00:00
{"language": ["sv"], "task_categories": ["token-classification"], "pretty_name": "UnioNER"}
2024-01-29T21:31:31+00:00
[]
[ "sv" ]
TAGS #task_categories-token-classification #language-Swedish #region-us
# Dataset Card for Dataset Name This dataset is currently incomplete; ongoing work. ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Dataset Name\n\n\n\nThis dataset is currently incomplete; ongoing work.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#task_categories-token-classification #language-Swedish #region-us \n", "# Dataset Card for Dataset Name\n\n\n\nThis dataset is currently incomplete; ongoing work.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 24, 19, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#task_categories-token-classification #language-Swedish #region-us \n# Dataset Card for Dataset Name\n\n\n\nThis dataset is currently incomplete; ongoing work.## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.0379842147231102, 0.18399323523044586, -0.003404978895559907, 0.04836302995681763, 0.11673718690872192, -0.004589430056512356, 0.05102388188242912, 0.08603334426879883, 0.061209507286548615, 0.15109789371490479, 0.04283403605222702, 0.09845181554555893, 0.14243121445178986, 0.18299487233161926, -0.06983009725809097, -0.1836526244878769, 0.05403419956564903, -0.09732212126255035, 0.08203262835741043, 0.11675788462162018, 0.13418297469615936, -0.11727320402860641, 0.05819017440080643, -0.08249764889478683, 0.03251050412654877, -0.01941034384071827, -0.061849333345890045, -0.039554327726364136, 0.07971085608005524, 0.03170028328895569, 0.05851659178733826, 0.052134785801172256, 0.057265546172857285, -0.2807527184486389, 0.02440943382680416, 0.029090218245983124, 0.004562126938253641, 0.03613108769059181, 0.060844507068395615, -0.06781984120607376, 0.11762526631355286, -0.15976479649543762, 0.10729809105396271, 0.05236236751079559, -0.07765952497720718, -0.1891830414533615, -0.13072910904884338, 0.0384310819208622, 0.11316889524459839, 0.005050277337431908, -0.051699668169021606, 0.1538548618555069, -0.040073465555906296, 0.05332068353891373, 0.1250009834766388, -0.0434587225317955, -0.018954208120703697, 0.08303968608379364, 0.06776076555252075, 0.09792299568653107, -0.11242356896400452, 0.05516783893108368, 0.07007358968257904, 0.02295217104256153, 0.060316119343042374, -0.007897233590483665, 0.011092612519860268, 0.022574232891201973, -0.14044785499572754, -0.0973457470536232, 0.22750531136989594, 0.016140848398208618, -0.05550429970026016, -0.23009352385997772, 0.012431135401129723, 0.039751529693603516, -0.011215235106647015, -0.04428211227059364, 0.004702045116573572, -0.08124137669801712, 0.10475963354110718, -0.03320200368762016, -0.0546359159052372, -0.026446906849741936, 0.06370416283607483, -0.010498201474547386, 0.011990003287792206, -0.007297564763575792, 0.05740835517644882, 0.041984494775533676, 0.04439322277903557, -0.09711680561304092, 
-0.07074971497058868, -0.07484325021505356, -0.09487639367580414, -0.0400199294090271, 0.09172679483890533, 0.005206206813454628, 0.11693127453327179, 0.11807885020971298, -0.033944062888622284, 0.019586067646741867, -0.07749826461076736, 0.024148082360625267, 0.1667013317346573, -0.05891738831996918, -0.06022205203771591, -0.13267932832241058, -0.037761952728033066, 0.028922023251652718, 0.06541700661182404, -0.05206112191081047, -0.029097622260451317, 0.017425203695893288, 0.0847029983997345, 0.16476888954639435, 0.1064368262887001, 0.01705923303961754, -0.06924963742494583, -0.027751347050070763, 0.14857682585716248, -0.1442994326353073, -0.004730751272290945, 0.0017483934061601758, -0.08017139136791229, -0.0670497864484787, -0.03563453629612923, 0.040423378348350525, -0.07258649915456772, 0.1250254362821579, -0.03037722222507, -0.05024847388267517, -0.06448636204004288, -0.09581514447927475, 0.10101190209388733, -0.09797799587249756, -0.007462312933057547, -0.0029430545400828123, -0.1817588061094284, -0.05490148067474365, 0.039090026170015335, -0.0977029874920845, -0.08357764035463333, -0.036174386739730835, 0.016601402312517166, 0.018528824672102928, -0.08826897293329239, 0.12745559215545654, -0.08365138620138168, 0.061341360211372375, -0.08639059215784073, 0.08549822121858597, 0.08345288783311844, 0.03006366454064846, -0.09763773530721664, 0.06740802526473999, -0.10344904661178589, 0.08419422060251236, -0.16179952025413513, -0.01683981716632843, -0.18534712493419647, -0.07057134062051773, 0.11485593765974045, 0.0032235889229923487, -0.04640527814626694, 0.17387178540229797, -0.17924834787845612, -0.004242629278451204, 0.15864315629005432, -0.1469910591840744, -0.12028837949037552, 0.07919710129499435, -0.0744345560669899, 0.06374949216842651, 0.06374876946210861, 0.0637185275554657, 0.03661604970693588, -0.1473933309316635, -0.07271454483270645, -0.03814174234867096, -0.037560269236564636, 0.1310938596725464, 0.08448228240013123, -0.04866689071059227, 
0.023421622812747955, 0.027499612420797348, 0.04299303889274597, -0.11629711091518402, -0.021258655935525894, -0.08859512954950333, 0.0015482400776818395, -0.047329846769571304, 0.061062537133693695, 0.01486571878194809, -0.1470811814069748, -0.053028807044029236, -0.0431537926197052, -0.03617187589406967, 0.05969386175274849, -0.018019329756498337, -0.03703219071030617, -0.030309554189443588, 0.04599758982658386, -0.010855067521333694, -0.019078856334090233, -0.14058923721313477, -0.1386793553829193, -0.0056637246161699295, -0.07109133899211884, 0.03463739901781082, 0.049942903220653534, 0.01586628332734108, 0.022738151252269745, -0.03359730541706085, 0.006462358869612217, 0.09893868118524551, 0.010267514735460281, 0.031333498656749725, -0.2198469638824463, 0.0015485105104744434, -0.08734014630317688, 0.08229222148656845, -0.18451881408691406, -0.012308058328926563, 0.1458306759595871, 0.13685759902000427, 0.017218219116330147, -0.034662436693906784, 0.11429408192634583, -0.007655461318790913, -0.03166072443127632, -0.061878353357315063, 0.031194861978292465, -0.08811714500188828, -0.021169070154428482, 0.048951342701911926, -0.13873682916164398, -0.022543279454112053, 0.09657979756593704, 0.08052317798137665, -0.10289204865694046, -0.185680091381073, -0.04946961998939514, -0.03168332204222679, -0.10042908042669296, -0.01262279786169529, 0.02396267093718052, 0.048630572855472565, 0.02092122845351696, -0.10383957624435425, -0.04304288327693939, 0.034677453339099884, -0.028623510152101517, -0.10453548282384872, 0.11650526523590088, 0.04125327989459038, -0.18715202808380127, 0.15499752759933472, -0.05299388989806175, 0.15419705212116241, 0.1475447416305542, 0.027436627075076103, -0.08249963819980621, 0.01422012411057949, 0.061477288603782654, 0.005879438016563654, 0.16329741477966309, -0.0870591402053833, 0.03665429353713989, 0.09278164058923721, -0.03424570709466934, 0.0006725281709805131, -0.04531900957226753, -0.005929210223257542, -0.00022739620180800557, 
-0.05424784496426582, -0.013116644695401192, -0.010550194419920444, 0.02838762290775776, 0.13337868452072144, 0.04367152974009514, 0.053514573723077774, 0.023423394188284874, -0.05547313392162323, -0.11826808750629425, 0.14008799195289612, -0.07859989255666733, -0.3369847536087036, -0.11125082522630692, -0.15712402760982513, -0.04213082790374756, -0.0025594860780984163, 0.044679444283246994, -0.03565562516450882, -0.12783725559711456, -0.08879803866147995, 0.04724006727337837, 0.005386005621403456, -0.14435818791389465, -0.11400627344846725, 0.03490827977657318, 0.008697405457496643, -0.10129132866859436, 0.02992037683725357, 0.05500827357172966, -0.013440290465950966, 0.033690910786390305, 0.05736405774950981, 0.15768592059612274, 0.09584297984838486, -0.00097415572963655, -0.013581853359937668, -0.022308241575956345, 0.19801340997219086, -0.13394427299499512, 0.05627848207950592, 0.10667760670185089, -0.07523133605718613, 0.076309435069561, 0.2649685740470886, 0.01334617380052805, -0.09275414049625397, 0.018424492329359055, 0.06510649621486664, -0.06821748614311218, -0.25616294145584106, -0.16040217876434326, -0.028992697596549988, -0.019376441836357117, 0.11707349121570587, 0.077442966401577, 0.05170672386884689, 0.0756799578666687, -0.13623856008052826, -0.08386631309986115, 0.03321927413344383, 0.09499064832925797, 0.011341718025505543, -0.006318066269159317, 0.06615957617759705, -0.018541553989052773, 0.006123255472630262, 0.11041184514760971, 0.07772773504257202, 0.20932991802692413, -0.016937077045440674, 0.15723615884780884, 0.0865088701248169, 0.09560460597276688, -0.016058748587965965, 0.04905913770198822, 0.01756180077791214, 0.07691916078329086, -0.03386866673827171, -0.07608270645141602, -0.0797925516963005, 0.07681796699762344, 0.06669122725725174, -0.025613505393266678, 0.004161404445767403, -0.09239576011896133, 0.05633946508169174, 0.10502344369888306, 0.032559726387262344, -0.14331239461898804, -0.02926560491323471, 0.10942055284976959, 
-0.023959603160619736, -0.13779930770397186, 0.005379823502153158, 0.05238543450832367, -0.21317486464977264, 0.1279045194387436, -0.058635152876377106, 0.1267504245042801, -0.1541847586631775, -0.03617648780345917, -0.07747239619493484, -0.02458447962999344, -0.05046846345067024, 0.1579805314540863, -0.20447474718093872, 0.21363455057144165, 0.02711588703095913, 0.036021195352077484, -0.14695750176906586, 0.027980230748653412, -0.03289426863193512, 0.010524787940084934, 0.24672633409500122, 0.009760288521647453, -0.12771596014499664, -0.07726558297872543, -0.08744712173938751, -0.011687741614878178, 0.061411015689373016, -0.022097958251833916, 0.09953409433364868, 0.00786325428634882, -0.009363154880702496, -0.06089860200881958, -0.13760046660900116, -0.11170922964811325, -0.2045452743768692, 0.052919380366802216, -0.09693370759487152, 0.02051413431763649, -0.029694801196455956, -0.0337076298892498, -0.03391023725271225, 0.17662937939167023, -0.1934947967529297, -0.14610782265663147, -0.1721869260072708, 0.008250207640230656, 0.10830795764923096, -0.032105375081300735, 0.0379396490752697, -0.002697231015190482, 0.19950920343399048, -0.05010959133505821, 0.0031475096475332975, 0.013249470852315426, -0.0741221085190773, -0.1668711006641388, -0.0483950600028038, 0.12327674776315689, 0.08190132677555084, 0.038416434079408646, 0.024979759007692337, 0.059473056346178055, 0.021738143637776375, -0.12044575810432434, 0.07232104986906052, 0.0929543524980545, 0.22666847705841064, 0.13815222680568695, 0.014456553384661674, -0.22003251314163208, -0.11280554533004761, -0.08963651210069656, 0.14020448923110962, 0.21928288042545319, -0.0519612655043602, 0.22117924690246582, 0.17570830881595612, -0.08238396048545837, -0.2537118196487427, 0.006870240438729525, -0.023918164893984795, -0.03208637982606888, 0.0576862171292305, -0.17028003931045532, -0.007683262694627047, 0.013803170993924141, -0.03513987734913826, 0.0949673056602478, -0.11160300672054291, -0.11068090051412582, 
0.0741630494594574, 0.0947645977139473, -0.16954514384269714, -0.1044144406914711, -0.10997302085161209, -0.07540637999773026, -0.16092944145202637, 0.13643209636211395, -0.010681225918233395, -0.009842566214501858, -0.0027582983020693064, 0.09463779628276825, 0.04658779129385948, -0.07626700401306152, 0.19705936312675476, -0.03893379494547844, 0.03275013715028763, -0.13281811773777008, -0.091947041451931, -0.023200109601020813, -0.034718867391347885, 0.13358044624328613, -0.014807564206421375, 0.002977811498567462, -0.11254572868347168, -0.08679350465536118, -0.05368748679757118, 0.08612360060214996, -0.07083593308925629, -0.111856609582901, -0.10524154454469681, 0.10459320992231369, 0.12112625688314438, -0.02916303277015686, 0.018174948170781136, -0.11401934921741486, 0.04745057225227356, 0.2171306610107422, 0.23790715634822845, 0.06836076080799103, -0.07001519203186035, -0.02489379607141018, -0.04755362495779991, 0.027749622240662575, -0.12565122544765472, 0.04261845722794533, 0.11104151606559753, 0.03196761757135391, 0.14300911128520966, -0.06484133750200272, -0.15388816595077515, -0.0013290554052218795, 0.044065505266189575, -0.12377999722957611, -0.19569626450538635, 0.012869701720774174, 0.04137790575623512, -0.13012437522411346, -0.07429341971874237, 0.049635693430900574, -0.010944937355816364, -0.010892456397414207, -0.007516761776059866, 0.11204389482736588, 0.0606708787381649, 0.07773596048355103, 0.06488560140132904, 0.10907429456710815, -0.1419980674982071, 0.06599202752113342, 0.10621077567338943, -0.1949359029531479, 0.06991709768772125, 0.0001414967409800738, -0.073760025203228, -0.051503974944353104, 0.027583695948123932, 0.09847154468297958, 0.01751101203262806, -0.12424150109291077, 0.05785294249653816, -0.09601570665836334, 0.03403656929731369, 0.13544820249080658, -0.004057720769196749, -0.013421419076621532, 0.07252056896686554, -0.03079110011458397, -0.10574312508106232, 0.1352759450674057, 0.03204388543963432, 0.011918466538190842, 
-0.05079580098390579, -0.03768669441342354, 0.011958299204707146, 0.034336503595113754, -0.03972839564085007, -0.020625542849302292, -0.03339921310544014, -0.008892769925296307, -0.18177367746829987, 0.08195477724075317, -0.0838811844587326, 0.025716673582792282, -0.029283933341503143, -0.05464930087327957, -0.007814423181116581, 0.014292586594820023, -0.07219384610652924, 0.011751316487789154, -0.006083177402615547, 0.07895249128341675, -0.22330141067504883, -0.049355797469615936, 0.10702716559171677, -0.042389288544654846, 0.08304604887962341, -0.02135438658297062, -0.06523443013429642, 0.033994369208812714, -0.12703090906143188, 0.03607521206140518, 0.011056661605834961, 0.04199206456542015, 0.06285634636878967, -0.14797678589820862, -0.01857403852045536, 0.01857057400047779, -0.09019413590431213, 0.039042726159095764, -0.037382401525974274, -0.10343466699123383, 0.06409751623868942, 0.002097528427839279, -0.07104536145925522, -0.014501852914690971, 0.09888716042041779, 0.11101377010345459, -0.039563730359077454, 0.13781632483005524, -0.024028217419981956, 0.07326041162014008, -0.10485197603702545, -0.024070410057902336, -0.02765963412821293, -0.043096430599689484, -0.004991766531020403, 0.05458327755331993, 0.04222463071346283, -0.03833840787410736, 0.32177332043647766, 0.05150691792368889, 0.013670193962752819, 0.040814101696014404, -0.0007909292471595109, -0.02527870610356331, 0.0517769493162632, 0.0823633149266243, -0.04059811681509018, 0.007942232303321362, 0.035557519644498825, -0.02543911524116993, -0.03565550595521927, -0.02789139933884144, 0.12509821355342865, 0.1413944810628891, 0.15879295766353607, -0.07650325447320938, 0.022139903157949448, -0.1389884501695633, -0.01845560409128666, 0.03924408555030823, 0.03133314847946167, 0.04310648888349533, 0.017380215227603912, 0.07701326906681061, 0.1535521149635315, -0.18185275793075562, 0.09444265812635422, -0.06849858909845352, -0.09295759350061417, -0.10751207172870636, -0.159439817070961, 
-0.08489584177732468, 0.01078216265887022, 0.029880650341510773, -0.11192546784877777, 0.0768367126584053, 0.13294771313667297, 0.01028041634708643, -0.0002387371496297419, 0.0583476759493351, -0.07471442967653275, 0.009396065026521683, 0.019923677667975426, 0.05598904564976692, 0.005604458972811699, -0.018492326140403748, 0.04263696074485779, 0.014570595696568489, 0.03794233128428459, 0.0509822815656662, 0.0681728795170784, 0.04194164648652077, 0.0028888429515063763, -0.05138092115521431, -0.07880359888076782, 0.011477732099592686, 0.02980329841375351, 0.0077135018073022366, 0.18373242020606995, 0.039921652525663376, 0.029295438900589943, -0.022157443687319756, 0.2192295342683792, -0.0037693167105317116, -0.050266049802303314, -0.09669920057058334, 0.1078365370631218, -0.06336449831724167, 0.06046275049448013, 0.06466961652040482, -0.12482821196317673, 0.07197882235050201, 0.11034873872995377, 0.19117751717567444, 0.0030266742687672377, -0.0017217770218849182, -0.08273945748806, 0.027522919699549675, 0.03583906963467598, 0.042300816625356674, -0.0006786538287997246, 0.13753029704093933, -0.08385777473449707, 0.07209199666976929, -0.03889372944831848, -0.019505297765135765, -0.02593296580016613, 0.1297798901796341, 0.060328707098960876, -0.0003100259928032756, -0.11163228750228882, 0.16972969472408295, -0.11893820017576218, -0.23022767901420593, 0.03573785349726677, -0.06213191896677017, -0.17154313623905182, -0.00973248016089201, 0.00745964702218771, 0.04071355611085892, -0.02728353999555111, 0.043725185096263885, 0.00732679758220911, 0.06466776132583618, 0.05850357189774513, -0.15536841750144958, -0.03574370592832565, 0.07847298681735992, 0.03187112882733345, 0.2825425863265991, 0.029733380302786827, 0.08149001747369766, 0.08295745402574539, -0.11622661352157593, -0.1179729625582695, 0.022210337221622467, 0.1279240995645523, 0.032918933779001236, 0.08689165860414505, 0.21923287212848663, -0.01082870177924633, 0.07121815532445908, 0.08548343181610107, 
-0.09335584938526154, 0.008850413374602795, 0.047245897352695465, -0.040628522634506226, -0.06267844885587692, 0.14971359074115753, -0.08939769864082336, 0.1140807494521141, 0.08802913129329681, -0.012163495644927025, 0.015766939148306847, -0.045408301055431366, 0.0851447656750679, -0.031054819002747536, 0.13680776953697205, 0.0704244077205658, -0.1537914276123047, -0.021212249994277954, 0.004449952393770218, 0.06559230387210846, -0.26136213541030884, -0.005626436788588762, 0.021301014348864555, 0.024322839453816414, -0.03287086263298988, 0.10760638117790222, -0.005697631277143955, 0.0017752780113369226, -0.057790398597717285, -0.03563189506530762, 0.027560381218791008, 0.13535958528518677, -0.06409104913473129, 0.03393083065748215 ]
238b728722e4f9317f204d67f20fb10a95a417c0
# Dataset Card for Evaluation run of jsfs11/WestOrcaNeural-V2-DARETIES-7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [jsfs11/WestOrcaNeural-V2-DARETIES-7B](https://huggingface.co/jsfs11/WestOrcaNeural-V2-DARETIES-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jsfs11__WestOrcaNeural-V2-DARETIES-7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T11:13:58.761964](https://huggingface.co/datasets/open-llm-leaderboard/details_jsfs11__WestOrcaNeural-V2-DARETIES-7B/blob/main/results_2024-01-25T11-13-58.761964.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.652478669506538, "acc_stderr": 0.032075479095728145, "acc_norm": 0.6516679535788203, "acc_norm_stderr": 0.03274751010456304, "mc1": 0.5410036719706243, "mc1_stderr": 0.017444544447661206, "mc2": 0.6781155236894492, "mc2_stderr": 0.015092099222963333 }, "harness|arc:challenge|25": { "acc": 0.6996587030716723, "acc_stderr": 0.013395909309957002, "acc_norm": 0.7209897610921502, "acc_norm_stderr": 0.013106784883601333 }, "harness|hellaswag|10": { "acc": 0.7122087233618801, "acc_stderr": 0.00451808059452802, "acc_norm": 0.8820952001593309, "acc_norm_stderr": 0.003218362717491134 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6444444444444445, "acc_stderr": 0.04135176749720385, "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.04135176749720385 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6973684210526315, "acc_stderr": 0.03738520676119668, "acc_norm": 0.6973684210526315, "acc_norm_stderr": 0.03738520676119668 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.690566037735849, "acc_stderr": 0.02845015479411864, "acc_norm": 0.690566037735849, "acc_norm_stderr": 0.02845015479411864 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7847222222222222, "acc_stderr": 0.03437079344106135, "acc_norm": 0.7847222222222222, "acc_norm_stderr": 0.03437079344106135 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 
0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6589595375722543, "acc_stderr": 0.036146654241808254, "acc_norm": 0.6589595375722543, "acc_norm_stderr": 0.036146654241808254 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.048971049527263666, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.048971049527263666 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5659574468085107, "acc_stderr": 0.03240038086792747, "acc_norm": 0.5659574468085107, "acc_norm_stderr": 0.03240038086792747 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370333, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370333 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42328042328042326, "acc_stderr": 0.025446365634406783, "acc_norm": 0.42328042328042326, "acc_norm_stderr": 0.025446365634406783 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4444444444444444, "acc_stderr": 0.04444444444444449, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.04444444444444449 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7967741935483871, "acc_stderr": 0.02289168798455496, "acc_norm": 0.7967741935483871, "acc_norm_stderr": 0.02289168798455496 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5123152709359606, "acc_stderr": 0.035169204442208966, "acc_norm": 0.5123152709359606, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.793939393939394, "acc_stderr": 0.0315841532404771, "acc_norm": 0.793939393939394, "acc_norm_stderr": 0.0315841532404771 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.797979797979798, "acc_stderr": 0.02860620428922987, "acc_norm": 0.797979797979798, "acc_norm_stderr": 0.02860620428922987 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.021500249576033456, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.021500249576033456 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6564102564102564, "acc_stderr": 0.024078696580635477, "acc_norm": 0.6564102564102564, "acc_norm_stderr": 0.024078696580635477 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34074074074074073, "acc_stderr": 0.028897748741131147, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.028897748741131147 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6764705882352942, "acc_stderr": 0.03038835355188679, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.03038835355188679 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3708609271523179, "acc_stderr": 0.03943966699183629, "acc_norm": 0.3708609271523179, "acc_norm_stderr": 0.03943966699183629 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8477064220183487, "acc_stderr": 0.015405084393157074, "acc_norm": 0.8477064220183487, "acc_norm_stderr": 0.015405084393157074 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5185185185185185, "acc_stderr": 
0.034076320938540516, "acc_norm": 0.5185185185185185, "acc_norm_stderr": 0.034076320938540516 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8480392156862745, "acc_stderr": 0.025195658428931796, "acc_norm": 0.8480392156862745, "acc_norm_stderr": 0.025195658428931796 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7974683544303798, "acc_stderr": 0.026160568246601443, "acc_norm": 0.7974683544303798, "acc_norm_stderr": 0.026160568246601443 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.03102441174057221, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.03102441174057221 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7786259541984732, "acc_stderr": 0.03641297081313729, "acc_norm": 0.7786259541984732, "acc_norm_stderr": 0.03641297081313729 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098823, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098823 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252627, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252627 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7668711656441718, "acc_stderr": 0.0332201579577674, "acc_norm": 0.7668711656441718, "acc_norm_stderr": 0.0332201579577674 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.04684099321077106, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.04684099321077106 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9017094017094017, "acc_stderr": 0.019503444900757567, "acc_norm": 0.9017094017094017, "acc_norm_stderr": 0.019503444900757567 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 
0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.822477650063857, "acc_stderr": 0.013664230995834841, "acc_norm": 0.822477650063857, "acc_norm_stderr": 0.013664230995834841 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7427745664739884, "acc_stderr": 0.023532925431044283, "acc_norm": 0.7427745664739884, "acc_norm_stderr": 0.023532925431044283 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.42905027932960893, "acc_stderr": 0.016553287863116033, "acc_norm": 0.42905027932960893, "acc_norm_stderr": 0.016553287863116033 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7156862745098039, "acc_stderr": 0.025829163272757482, "acc_norm": 0.7156862745098039, "acc_norm_stderr": 0.025829163272757482 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7106109324758842, "acc_stderr": 0.025755865922632945, "acc_norm": 0.7106109324758842, "acc_norm_stderr": 0.025755865922632945 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.75, "acc_stderr": 0.02409347123262133, "acc_norm": 0.75, "acc_norm_stderr": 0.02409347123262133 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4929078014184397, "acc_stderr": 0.02982449855912901, "acc_norm": 0.4929078014184397, "acc_norm_stderr": 0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46870925684485004, "acc_stderr": 0.012745204626083133, "acc_norm": 0.46870925684485004, "acc_norm_stderr": 0.012745204626083133 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6764705882352942, "acc_stderr": 0.02841820861940676, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.02841820861940676 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.673202614379085, "acc_stderr": 0.018975427920507208, "acc_norm": 0.673202614379085, "acc_norm_stderr": 0.018975427920507208 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, 
"acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7428571428571429, "acc_stderr": 0.02797982353874455, "acc_norm": 0.7428571428571429, "acc_norm_stderr": 0.02797982353874455 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.025870646766169136, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.025870646766169136 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.03588702812826371, "acc_norm": 0.85, "acc_norm_stderr": 0.03588702812826371 }, "harness|hendrycksTest-virology|5": { "acc": 0.5662650602409639, "acc_stderr": 0.03858158940685516, "acc_norm": 0.5662650602409639, "acc_norm_stderr": 0.03858158940685516 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8187134502923976, "acc_stderr": 0.029547741687640044, "acc_norm": 0.8187134502923976, "acc_norm_stderr": 0.029547741687640044 }, "harness|truthfulqa:mc|0": { "mc1": 0.5410036719706243, "mc1_stderr": 0.017444544447661206, "mc2": 0.6781155236894492, "mc2_stderr": 0.015092099222963333 }, "harness|winogrande|5": { "acc": 0.8374112075769534, "acc_stderr": 0.01037045555134333 }, "harness|gsm8k|5": { "acc": 0.7065959059893859, "acc_stderr": 0.01254183081546149 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_jsfs11__WestOrcaNeural-V2-DARETIES-7B
[ "region:us" ]
2024-01-25T11:16:18+00:00
{"pretty_name": "Evaluation run of jsfs11/WestOrcaNeural-V2-DARETIES-7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [jsfs11/WestOrcaNeural-V2-DARETIES-7B](https://huggingface.co/jsfs11/WestOrcaNeural-V2-DARETIES-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jsfs11__WestOrcaNeural-V2-DARETIES-7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T11:13:58.761964](https://huggingface.co/datasets/open-llm-leaderboard/details_jsfs11__WestOrcaNeural-V2-DARETIES-7B/blob/main/results_2024-01-25T11-13-58.761964.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.652478669506538,\n \"acc_stderr\": 0.032075479095728145,\n \"acc_norm\": 0.6516679535788203,\n \"acc_norm_stderr\": 0.03274751010456304,\n \"mc1\": 0.5410036719706243,\n \"mc1_stderr\": 0.017444544447661206,\n \"mc2\": 0.6781155236894492,\n \"mc2_stderr\": 0.015092099222963333\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6996587030716723,\n \"acc_stderr\": 0.013395909309957002,\n \"acc_norm\": 0.7209897610921502,\n \"acc_norm_stderr\": 0.013106784883601333\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7122087233618801,\n \"acc_stderr\": 0.00451808059452802,\n \"acc_norm\": 0.8820952001593309,\n \"acc_norm_stderr\": 0.003218362717491134\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6444444444444445,\n \"acc_stderr\": 0.04135176749720385,\n \"acc_norm\": 0.6444444444444445,\n \"acc_norm_stderr\": 0.04135176749720385\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.03738520676119668,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.03738520676119668\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.690566037735849,\n \"acc_stderr\": 0.02845015479411864,\n \"acc_norm\": 0.690566037735849,\n \"acc_norm_stderr\": 0.02845015479411864\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7847222222222222,\n \"acc_stderr\": 0.03437079344106135,\n \"acc_norm\": 0.7847222222222222,\n \"acc_norm_stderr\": 0.03437079344106135\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 
0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6589595375722543,\n \"acc_stderr\": 0.036146654241808254,\n \"acc_norm\": 0.6589595375722543,\n \"acc_norm_stderr\": 0.036146654241808254\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5659574468085107,\n \"acc_stderr\": 0.03240038086792747,\n \"acc_norm\": 0.5659574468085107,\n \"acc_norm_stderr\": 0.03240038086792747\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370333,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370333\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42328042328042326,\n \"acc_stderr\": 0.025446365634406783,\n \"acc_norm\": 0.42328042328042326,\n \"acc_norm_stderr\": 0.025446365634406783\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.04444444444444449,\n 
\"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.04444444444444449\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7967741935483871,\n \"acc_stderr\": 0.02289168798455496,\n \"acc_norm\": 0.7967741935483871,\n \"acc_norm_stderr\": 0.02289168798455496\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.793939393939394,\n \"acc_stderr\": 0.0315841532404771,\n \"acc_norm\": 0.793939393939394,\n \"acc_norm_stderr\": 0.0315841532404771\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.797979797979798,\n \"acc_stderr\": 0.02860620428922987,\n \"acc_norm\": 0.797979797979798,\n \"acc_norm_stderr\": 0.02860620428922987\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033456,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.021500249576033456\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6564102564102564,\n \"acc_stderr\": 0.024078696580635477,\n \"acc_norm\": 0.6564102564102564,\n \"acc_norm_stderr\": 0.024078696580635477\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34074074074074073,\n \"acc_stderr\": 0.028897748741131147,\n \"acc_norm\": 0.34074074074074073,\n \"acc_norm_stderr\": 0.028897748741131147\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n 
\"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.03038835355188679,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.03038835355188679\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8477064220183487,\n \"acc_stderr\": 0.015405084393157074,\n \"acc_norm\": 0.8477064220183487,\n \"acc_norm_stderr\": 0.015405084393157074\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5185185185185185,\n \"acc_stderr\": 0.034076320938540516,\n \"acc_norm\": 0.5185185185185185,\n \"acc_norm_stderr\": 0.034076320938540516\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8480392156862745,\n \"acc_stderr\": 0.025195658428931796,\n \"acc_norm\": 0.8480392156862745,\n \"acc_norm_stderr\": 0.025195658428931796\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7974683544303798,\n \"acc_stderr\": 0.026160568246601443,\n \"acc_norm\": 0.7974683544303798,\n \"acc_norm_stderr\": 0.026160568246601443\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.03102441174057221,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.03102441174057221\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.03641297081313729,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.03641297081313729\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098823,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098823\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 
0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7668711656441718,\n \"acc_stderr\": 0.0332201579577674,\n \"acc_norm\": 0.7668711656441718,\n \"acc_norm_stderr\": 0.0332201579577674\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9017094017094017,\n \"acc_stderr\": 0.019503444900757567,\n \"acc_norm\": 0.9017094017094017,\n \"acc_norm_stderr\": 0.019503444900757567\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.822477650063857,\n \"acc_stderr\": 0.013664230995834841,\n \"acc_norm\": 0.822477650063857,\n \"acc_norm_stderr\": 0.013664230995834841\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7427745664739884,\n \"acc_stderr\": 0.023532925431044283,\n \"acc_norm\": 0.7427745664739884,\n \"acc_norm_stderr\": 0.023532925431044283\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.42905027932960893,\n \"acc_stderr\": 0.016553287863116033,\n \"acc_norm\": 0.42905027932960893,\n \"acc_norm_stderr\": 0.016553287863116033\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7156862745098039,\n \"acc_stderr\": 0.025829163272757482,\n \"acc_norm\": 0.7156862745098039,\n \"acc_norm_stderr\": 0.025829163272757482\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n \"acc_stderr\": 0.025755865922632945,\n \"acc_norm\": 0.7106109324758842,\n \"acc_norm_stderr\": 
0.025755865922632945\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.02409347123262133,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.02409347123262133\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4929078014184397,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.4929078014184397,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46870925684485004,\n \"acc_stderr\": 0.012745204626083133,\n \"acc_norm\": 0.46870925684485004,\n \"acc_norm_stderr\": 0.012745204626083133\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.02841820861940676,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.02841820861940676\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.673202614379085,\n \"acc_stderr\": 0.018975427920507208,\n \"acc_norm\": 0.673202614379085,\n \"acc_norm_stderr\": 0.018975427920507208\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7428571428571429,\n \"acc_stderr\": 0.02797982353874455,\n \"acc_norm\": 0.7428571428571429,\n \"acc_norm_stderr\": 0.02797982353874455\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.025870646766169136,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.025870646766169136\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.03588702812826371,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.03588702812826371\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5662650602409639,\n \"acc_stderr\": 0.03858158940685516,\n \"acc_norm\": 0.5662650602409639,\n \"acc_norm_stderr\": 
0.03858158940685516\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8187134502923976,\n \"acc_stderr\": 0.029547741687640044,\n \"acc_norm\": 0.8187134502923976,\n \"acc_norm_stderr\": 0.029547741687640044\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5410036719706243,\n \"mc1_stderr\": 0.017444544447661206,\n \"mc2\": 0.6781155236894492,\n \"mc2_stderr\": 0.015092099222963333\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8374112075769534,\n \"acc_stderr\": 0.01037045555134333\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7065959059893859,\n \"acc_stderr\": 0.01254183081546149\n }\n}\n```", "repo_url": "https://huggingface.co/jsfs11/WestOrcaNeural-V2-DARETIES-7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|arc:challenge|25_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|gsm8k|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hellaswag|10_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T11-13-58.761964.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T11-13-58.761964.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T11-13-58.761964.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T11-13-58.761964.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T11-13-58.761964.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T11-13-58.761964.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T11-13-58.761964.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T11-13-58.761964.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["**/details_harness|winogrande|5_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T11-13-58.761964.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T11_13_58.761964", "path": ["results_2024-01-25T11-13-58.761964.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T11-13-58.761964.parquet"]}]}]}
2024-01-25T11:16:39+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jsfs11/WestOrcaNeural-V2-DARETIES-7B Dataset automatically created during the evaluation run of model jsfs11/WestOrcaNeural-V2-DARETIES-7B on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T11:13:58.761964(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of jsfs11/WestOrcaNeural-V2-DARETIES-7B\n\n\n\nDataset automatically created during the evaluation run of model jsfs11/WestOrcaNeural-V2-DARETIES-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T11:13:58.761964(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jsfs11/WestOrcaNeural-V2-DARETIES-7B\n\n\n\nDataset automatically created during the evaluation run of model jsfs11/WestOrcaNeural-V2-DARETIES-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T11:13:58.761964(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 197, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of jsfs11/WestOrcaNeural-V2-DARETIES-7B\n\n\n\nDataset automatically created during the evaluation run of model jsfs11/WestOrcaNeural-V2-DARETIES-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T11:13:58.761964(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
[ -0.06170463562011719, 0.22005410492420197, -0.0058944025076925755, 0.039427343755960464, 0.09440130740404129, -0.013302193023264408, 0.015262327156960964, 0.11114820837974548, -0.011468056589365005, 0.1566845178604126, -0.019946729764342308, 0.09732097387313843, 0.08624854683876038, 0.1479302942752838, 0.005679677240550518, -0.11313328146934509, 0.022580062970519066, -0.07687986642122269, 0.06153577193617821, 0.07818295061588287, 0.0891217440366745, -0.0905887633562088, 0.058776725083589554, -0.042753539979457855, -0.022625835612416267, 0.01309286244213581, -0.08016978204250336, -0.030322352424263954, 0.08642708510160446, 0.08744247257709503, 0.02509928122162819, -0.025535935536026955, 0.019072212278842926, -0.23996388912200928, 0.016721663996577263, 0.08205846697092056, 0.003059672424569726, 0.05631045252084732, 0.13818074762821198, -0.058352842926979065, 0.04561610147356987, -0.07533984631299973, 0.05464806780219078, 0.042802389711141586, -0.12533703446388245, -0.12051868438720703, -0.14095039665699005, 0.009528038091957569, 0.07821956276893616, 0.05192795395851135, -0.026657691225409508, 0.12172844260931015, -0.025819456204771996, 0.04326900467276573, 0.09471342712640762, -0.09451595693826675, -0.028682038187980652, 0.030375927686691284, 0.022747233510017395, 0.04977257922291756, -0.10891593247652054, -0.028307300060987473, 0.026988303288817406, 0.04995983839035034, 0.019657135009765625, 0.0018141312757506967, -0.09373737871646881, 0.024687396362423897, -0.10443682968616486, -0.1094643846154213, 0.19545887410640717, 0.00763757573440671, -0.033804163336753845, -0.14579449594020844, -0.03175772354006767, -0.0026550604961812496, -0.000699101947247982, -0.03053535334765911, 0.010585295036435127, -0.015430253930389881, 0.07657639682292938, -0.02672254852950573, -0.10108551383018494, -0.009044654667377472, -0.01750772073864937, 0.029937107115983963, 0.0037867205683141947, -0.008872374892234802, 0.000012029730896756519, 0.11512674391269684, -0.045169029384851456, 
-0.10436739027500153, -0.06895606964826584, -0.043478094041347504, -0.11315124481916428, -0.04863237962126732, 0.020314667373895645, -0.06609909981489182, 0.03743390366435051, 0.2145942598581314, -0.057302869856357574, 0.02700250782072544, -0.09926260262727737, -0.007515518460422754, 0.12587279081344604, 0.07007459551095963, -0.055337999016046524, -0.06498559564352036, -0.014169955626130104, 0.02044949121773243, 0.012518574483692646, -0.018147548660635948, 0.014152408577501774, 0.05191464722156525, 0.05212290957570076, 0.1175285205245018, 0.11705053597688675, 0.01216807123273611, -0.07439162582159042, -0.02561202645301819, 0.2160581797361374, -0.16744156181812286, 0.013253666460514069, 0.015241304412484169, -0.04128082096576691, -0.09482807666063309, 0.06126529723405838, -0.006576533429324627, -0.0693187490105629, 0.10054833441972733, -0.05829828232526779, -0.06155049428343773, -0.0838974118232727, -0.04592961072921753, 0.056021299213171005, -0.016939137130975723, -0.024561652913689613, -0.06123952195048332, -0.1364966779947281, -0.08215580135583878, 0.021496396511793137, -0.07729534804821014, -0.022561293095350266, 0.02605034038424492, -0.02544742077589035, -0.020067377015948296, -0.017307208850979805, 0.1307399570941925, -0.06053883209824562, 0.0253617987036705, 0.015623560175299644, 0.001365377800539136, 0.078698068857193, 0.05113118514418602, -0.11958226561546326, 0.07792463153600693, -0.07632456719875336, 0.11178673803806305, -0.08646894991397858, 0.00921773724257946, -0.13128456473350525, 0.01868448592722416, -0.029644742608070374, 0.009669715538620949, 0.011492045596241951, 0.10213764756917953, -0.25190725922584534, 0.026445385068655014, 0.10424633324146271, -0.09227625280618668, -0.09467827528715134, 0.06198854744434357, -0.04737769812345505, 0.06720004975795746, 0.05141369625926018, 0.08900083601474762, 0.1020917147397995, -0.06908804178237915, -0.11389946192502975, -0.08198679983615875, -0.022624477744102478, 0.11982443183660507, 0.05842973291873932, 
-0.06297953426837921, 0.12674376368522644, 0.04311962053179741, -0.006183234509080648, -0.08507289737462997, -0.016191449016332626, -0.0677141323685646, -0.02468736656010151, -0.041329678148031235, -0.07200060784816742, 0.005830482114106417, -0.08643640577793121, -0.030527647584676743, -0.08689434081315994, 0.011592905037105083, 0.0769222155213356, -0.010413373820483685, 0.009360438212752342, -0.06395397335290909, 0.03921021521091461, -0.010726453736424446, 0.02422500029206276, -0.22045418620109558, -0.09233950078487396, 0.03398339822888374, -0.08987367153167725, 0.051796358078718185, 0.021654468029737473, 0.019336378201842308, 0.02904912270605564, -0.005574059672653675, 0.003590326989069581, 0.013372126966714859, -0.005029773805290461, -0.008589646779000759, -0.1343265175819397, -0.04679340124130249, -0.08841871470212936, 0.04785119369626045, -0.14261950552463531, -0.017616305500268936, 0.07296814769506454, 0.1582372635602951, 0.012046250514686108, -0.07686183601617813, 0.07373393326997757, -0.007960210554301739, -0.04440145194530487, -0.0645487979054451, -0.00593219231814146, -0.019005997106432915, 0.05902811512351036, 0.021118180826306343, -0.1907573640346527, -0.12704907357692719, 0.06841707974672318, 0.11416935175657272, -0.06707800179719925, -0.04956822469830513, -0.06862078607082367, -0.05374792590737343, -0.09148405492305756, -0.07396391034126282, 0.07762681692838669, 0.07653441280126572, 0.03572820499539375, -0.06644709408283234, -0.07110653072595596, 0.002161298179998994, 0.03312185779213905, -0.07613171637058258, 0.09753278642892838, 0.05836508050560951, -0.07405084371566772, 0.10837826132774353, 0.03773598372936249, 0.11770519614219666, 0.0816897302865982, 0.008828085847198963, -0.1048484817147255, -0.015079880133271217, 0.0488949716091156, 0.03173162043094635, 0.07395930588245392, -0.04933910071849823, 0.044726885855197906, 0.08187048882246017, -0.004974316339939833, 0.05258290469646454, -0.06421137601137161, 0.04165941849350929, 0.035375434905290604, 
0.0022363001480698586, 0.04485704004764557, -0.0018267679261043668, 0.019043127074837685, 0.0652034729719162, 0.02152758277952671, 0.10806881636381149, -0.006277017295360565, -0.04253706708550453, -0.08805672824382782, 0.13958919048309326, -0.10831096023321152, -0.25503110885620117, -0.16155412793159485, -0.05548414960503578, -0.03474930673837662, 0.001260368269868195, 0.053507667034864426, -0.011764422059059143, -0.11376510560512543, -0.09413611888885498, 0.058431677520275116, 0.023907752707600594, -0.13010963797569275, -0.024778004735708237, 0.05492498725652695, 0.020241284742951393, -0.15738247334957123, 0.035705000162124634, 0.054563574492931366, -0.05378139764070511, 0.0027936729602515697, 0.09899165481328964, 0.12705805897712708, 0.07094521075487137, 0.03255845606327057, -0.024522537365555763, 0.010490049608051777, 0.20159099996089935, -0.10024512559175491, 0.04274884611368179, 0.12839928269386292, -0.05149442330002785, 0.06415259838104248, 0.18198978900909424, 0.009973058477044106, -0.09466712921857834, 0.03542828559875488, 0.0779595673084259, -0.059540022164583206, -0.27157968282699585, -0.07714106887578964, -0.037827491760253906, -0.0027624648064374924, 0.08821634948253632, 0.06511572748422623, 0.010750886052846909, 0.024280564859509468, -0.10416436940431595, -0.01960335299372673, -0.049853235483169556, 0.06213683262467384, 0.08124668151140213, 0.007742258720099926, 0.04343614727258682, -0.043423622846603394, 0.04514601081609726, 0.11617114394903183, 0.06061219424009323, 0.15324252843856812, -0.03002343513071537, 0.15453214943408966, 0.08383718878030777, 0.08335806429386139, -0.04039034619927406, 0.06604491919279099, 0.0054965270683169365, 0.060208581387996674, -0.0049774060025811195, -0.10107707977294922, -0.06225265562534332, 0.083580382168293, -0.00006953591218916699, -0.0425257608294487, 0.04195927083492279, -0.03896221145987511, 0.04801354929804802, 0.13418230414390564, 0.0018750157905742526, -0.14568543434143066, -0.06395178288221359, 
0.06455632299184799, -0.02485092543065548, -0.11415054649114609, -0.025993866845965385, 0.07642572373151779, -0.13350053131580353, 0.004612288903445005, -0.018830854445695877, 0.09883534908294678, -0.11724790185689926, -0.018826546147465706, -0.027579661458730698, 0.09206083416938782, -0.006686202250421047, 0.11854846775531769, -0.1449863463640213, 0.09981250762939453, 0.008893386460840702, 0.04895723611116409, -0.08467541635036469, 0.07290072739124298, -0.03191179782152176, -0.04752136394381523, 0.14192138612270355, -0.015281639993190765, -0.10672209411859512, -0.04178641363978386, -0.11744603514671326, -0.001363162649795413, 0.049200672656297684, -0.10230766981840134, 0.10331981629133224, 0.024031871929764748, -0.01508493535220623, -0.02863486297428608, -0.014830097556114197, -0.12677426636219025, -0.22607779502868652, 0.09875156730413437, -0.10368439555168152, 0.06856880336999893, -0.03891343995928764, -0.033496372401714325, -0.05562154948711395, 0.1755938082933426, -0.07387730479240417, -0.06507735699415207, -0.12922845780849457, 0.03128105774521828, 0.18791744112968445, -0.05396297574043274, 0.048503223806619644, -0.04903277009725571, 0.18098857998847961, -0.0024917195551097393, -0.05155368149280548, -0.0039186119101941586, -0.09261823445558548, -0.14522719383239746, -0.0409729965031147, 0.14251497387886047, 0.045292772352695465, 0.010656335391104221, 0.005993398372083902, 0.04374467581510544, -0.0035229879431426525, -0.07951720058917999, 0.045277029275894165, 0.08262087404727936, 0.1151299998164177, 0.040816232562065125, -0.04232112318277359, -0.15991343557834625, -0.10329018533229828, -0.0843663439154625, 0.06037663295865059, 0.1344979852437973, -0.05277417227625847, 0.13931122422218323, 0.13246318697929382, -0.09905951470136642, -0.18899372220039368, -0.042089130729436874, 0.024096133187413216, -0.007633395027369261, 0.11037615686655045, -0.18997015058994293, 0.07563307881355286, 0.06388334929943085, -0.002935955300927162, 0.10810225456953049, 
-0.21054594218730927, -0.13050349056720734, 0.03148750215768814, 0.02263539843261242, -0.22765052318572998, -0.15319199860095978, -0.11441490799188614, -0.016644248738884926, -0.19390782713890076, 0.1332932412624359, 0.010691627860069275, 0.026987601071596146, -0.009197892621159554, 0.07684966176748276, 0.05514844134449959, -0.05758028104901314, 0.132905513048172, 0.013554533012211323, 0.03420180454850197, -0.09341451525688171, -0.031557295471429825, 0.03933454677462578, -0.051180534064769745, 0.08991556614637375, 0.05103324353694916, 0.05483291298151016, -0.10406957566738129, -0.03456857055425644, -0.04625209793448448, 0.04971640929579735, -0.07217209786176682, -0.057211942970752716, -0.04633214324712753, 0.07747646421194077, 0.06999445706605911, -0.02521505206823349, 0.04197303205728531, -0.030197082087397575, 0.036034584045410156, 0.24229088425636292, 0.1000991091132164, 0.03214399516582489, -0.09355726093053818, -0.020651759579777718, -0.006732519716024399, -0.004717501811683178, -0.1514788419008255, 0.05449647828936577, 0.09630019962787628, 0.031462617218494415, 0.08975280076265335, -0.025186119601130486, -0.176930233836174, -0.0025629200972616673, 0.08131689578294754, -0.11359072476625443, -0.1990436166524887, 0.02493114396929741, 0.12523376941680908, -0.14043647050857544, -0.0472673736512661, 0.0923265814781189, 0.029242202639579773, -0.027818914502859116, 0.006525839678943157, 0.07612526416778564, 0.04947125166654587, 0.10950181633234024, 0.006020319648087025, 0.05072406679391861, -0.08423568308353424, 0.11129492521286011, 0.1404809206724167, -0.10857831686735153, 0.017337454482913017, 0.06619302183389664, -0.048864204436540604, -0.0562230721116066, 0.023664133623242378, 0.03332936391234398, 0.029965320602059364, -0.043395813554525375, 0.03205544874072075, -0.03424141928553581, 0.05775025114417076, 0.10021931678056717, -0.0027655295561999083, 0.03403106704354286, 0.019200576469302177, -0.014230426400899887, -0.07992721349000931, 0.12142869830131531, 
0.05520456284284592, 0.0353144109249115, -0.042499177157878876, 0.01496662013232708, -0.002042771317064762, -0.005544512998312712, 0.020241321995854378, -0.041124314069747925, -0.03009517677128315, -0.010367821902036667, -0.15297777950763702, 0.02487911842763424, -0.084477499127388, -0.006053075660020113, -0.007328378036618233, -0.023032011464238167, -0.021036041900515556, 0.02138555608689785, -0.05494871735572815, -0.05921662226319313, -0.05209588259458542, 0.10877101868391037, -0.19566935300827026, -0.0056002154015004635, 0.08713323622941971, -0.0776829719543457, 0.07683676481246948, 0.016274353489279747, -0.018021507188677788, 0.01218563411384821, -0.10430378466844559, -0.03307786211371422, -0.02198912389576435, 0.05515887960791588, 0.030015897005796432, -0.14754532277584076, -0.010566109791398048, 0.020893650129437447, -0.07459823787212372, -0.02351817674934864, 0.04199022054672241, -0.14871624112129211, 0.03471077233552933, 0.058779869228601456, -0.03996513783931732, -0.04052694886922836, 0.05659414455294609, 0.061581388115882874, 0.009236960671842098, 0.10672128200531006, 0.004887835588306189, 0.04684193432331085, -0.15753740072250366, -0.04287724569439888, -0.0022169710136950016, -0.0021896620746701956, -0.0005137286498211324, 0.017034456133842468, 0.04501494765281677, -0.009702673181891441, 0.18588724732398987, -0.00707104429602623, 0.07652238756418228, 0.0316331684589386, 0.011742375791072845, -0.059321288019418716, 0.029524507001042366, 0.04715431481599808, -0.0012260271469131112, 0.016064900904893875, 0.02058352902531624, -0.031868450343608856, -0.03363494947552681, -0.037825871258974075, 0.08158627897500992, 0.1505720615386963, 0.14328593015670776, -0.028603341430425644, 0.06517153233289719, -0.15318593382835388, -0.050724759697914124, 0.03702671825885773, -0.046355850994586945, 0.03293652459979057, -0.07552722096443176, 0.031008407473564148, 0.07582943141460419, -0.12583261728286743, 0.13946454226970673, -0.0625884160399437, -0.04799048975110054, 
-0.02664915658533573, -0.14520134031772614, -0.04807709902524948, 0.01711202599108219, 0.011335858143866062, -0.11401024460792542, 0.11750457435846329, 0.11725083738565445, -0.01082876417785883, -0.021624481305480003, 0.10601747781038284, -0.05896969139575958, -0.06454908102750778, -0.025392819195985794, 0.02189534716308117, 0.03199745714664459, -0.006623386405408382, 0.095390684902668, 0.014367174357175827, 0.07834763079881668, 0.06939267367124557, 0.09914646297693253, 0.08091799169778824, 0.010448357090353966, -0.04691878706216812, -0.06529644131660461, -0.0029206755571067333, -0.010990924201905727, -0.06437404453754425, 0.17401577532291412, 0.03989003971219063, 0.019401077181100845, 0.01730554737150669, 0.2192658931016922, -0.004619975108653307, -0.07882189005613327, -0.13816609978675842, 0.05780939385294914, 0.005915745627135038, 0.021256953477859497, 0.040037475526332855, -0.14282973110675812, 0.042488619685173035, 0.16695791482925415, 0.08971939235925674, 0.020082106813788414, 0.006550205871462822, 0.02959386631846428, 0.026976831257343292, -0.044360049068927765, 0.03270653262734413, 0.03532489016652107, 0.1617608368396759, -0.04788140580058098, 0.05204983055591583, -0.016695668920874596, -0.026050357148051262, -0.020199919119477272, 0.08487867563962936, -0.05639059469103813, 0.010517309419810772, -0.050585296005010605, 0.09094665944576263, -0.03722428157925606, -0.2831244468688965, -0.017528938129544258, -0.09700685739517212, -0.12888509035110474, -0.033185768872499466, 0.061277735978364944, -0.021344123408198357, 0.02717841975390911, 0.03783640265464783, -0.022142091765999794, 0.20876532793045044, 0.012255566194653511, -0.06734142452478409, -0.040749624371528625, 0.06901560723781586, -0.04266485571861267, 0.2343534678220749, 0.007645840756595135, 0.04142230004072189, 0.0890798568725586, -0.023909833282232285, -0.17308738827705383, 0.029690425843000412, 0.09987588971853256, -0.019160054624080658, 0.05780728533864021, 0.1587161421775818, 
-0.015634164214134216, 0.105772003531456, 0.05175706371665001, -0.020833438262343407, 0.037721503525972366, 0.06582192331552505, 0.028193224221467972, -0.08297166228294373, 0.07151421159505844, -0.07634226232767105, 0.12928366661071777, 0.11148436367511749, -0.02902580052614212, 0.0018083512550219893, -0.06952618807554245, 0.07204306870698929, -0.026211239397525787, 0.1309002935886383, -0.01050974428653717, -0.15583635866641998, 0.04306865856051445, 0.050710324198007584, 0.0643925666809082, -0.202773317694664, -0.06956004351377487, 0.10987401753664017, -0.049437157809734344, -0.018048491328954697, 0.11574089527130127, 0.05096371844410896, 0.019507955759763718, -0.05579199269413948, -0.06405877321958542, -0.0013104109093546867, 0.11706031113862991, -0.06168853119015694, -0.026627467945218086 ]
ff3c9840df47450b5c5eb72179e97ddcb3a2960b
</br> # Can LLMs Become Editors? ### Dataset Summary Judge is a new dataset for investigating how LLMs handle judging and writing responses with long term memory, short term memory and key information. To succeed, an LLM needs to make correct evaluations of new responses based on the short, long and key data provided. Along with this test, we can also evaulate how an LLM writes theres new responses as well. The coverage of questions in the dataset includes multiple categories like sports, music, history, gaming and more. #### Dataset Size This is the small version of the dataset with only 100 questions. Designed to be a low-cost test to find out how current LLMs handle these types of problems. #### LLM Results <img alt="benchmark" src="small_benchmark.png"> -- #### Initial Low Scores Across The Board During the experiments with JudgeSmall it was discovered that LLMs consistantly mixed up 4 point responses and 5 point responses. When taking this into account, scores increased dramatically for all LLMs. #### Self Reward Language Models (Link: https://arxiv.org/pdf/2401.10020.pdf) This paper was the inspiration for the creation of this dataset. The same scoring system used in this paper was used in the evaluation of LLMs with JudgeSmall. -- #### Future Work - Finding a way to prevent the mix up between a 4 point response and a 5 point response. - Finding out the proper instructions to increase GPT4's score. - Increasing the size of the dataset to create a training set for fine-tuning.
sgans/JudgeSmall
[ "task_categories:question-answering", "size_categories:n<1K", "language:en", "license:mit", "arxiv:2401.10020", "region:us" ]
2024-01-25T11:32:42+00:00
{"language": ["en"], "license": "mit", "size_categories": ["n<1K"], "task_categories": ["question-answering"]}
2024-01-25T12:08:30+00:00
[ "2401.10020" ]
[ "en" ]
TAGS #task_categories-question-answering #size_categories-n<1K #language-English #license-mit #arxiv-2401.10020 #region-us
</br> # Can LLMs Become Editors? ### Dataset Summary Judge is a new dataset for investigating how LLMs handle judging and writing responses with long term memory, short term memory and key information. To succeed, an LLM needs to make correct evaluations of new responses based on the short, long and key data provided. Along with this test, we can also evaulate how an LLM writes theres new responses as well. The coverage of questions in the dataset includes multiple categories like sports, music, history, gaming and more. #### Dataset Size This is the small version of the dataset with only 100 questions. Designed to be a low-cost test to find out how current LLMs handle these types of problems. #### LLM Results <img alt="benchmark" src="small_benchmark.png"> -- #### Initial Low Scores Across The Board During the experiments with JudgeSmall it was discovered that LLMs consistantly mixed up 4 point responses and 5 point responses. When taking this into account, scores increased dramatically for all LLMs. #### Self Reward Language Models (Link: URL This paper was the inspiration for the creation of this dataset. The same scoring system used in this paper was used in the evaluation of LLMs with JudgeSmall. -- #### Future Work - Finding a way to prevent the mix up between a 4 point response and a 5 point response. - Finding out the proper instructions to increase GPT4's score. - Increasing the size of the dataset to create a training set for fine-tuning.
[ "# Can LLMs Become Editors?", "### Dataset Summary\n\nJudge is a new dataset for investigating how LLMs handle judging and writing responses with long term memory, short term memory and key information.\nTo succeed, an LLM needs to make correct evaluations of new responses based on the short, long and key data provided. Along with this test, we \ncan also evaulate how an LLM writes theres new responses as well. The coverage of questions in the dataset includes multiple categories like sports, music, history, gaming and more.", "#### Dataset Size\n\nThis is the small version of the dataset with only 100 questions. Designed to be a low-cost test to find out how current LLMs handle these types\nof problems.", "#### LLM Results\n\n<img alt=\"benchmark\" src=\"small_benchmark.png\">\n\n--", "#### Initial Low Scores Across The Board\n\nDuring the experiments with JudgeSmall it was discovered that LLMs consistantly mixed up 4 point responses and 5 point responses. When taking this into\naccount, scores increased dramatically for all LLMs.", "#### Self Reward Language Models\n(Link: URL\n\nThis paper was the inspiration for the creation of this dataset. The same scoring system used in this paper was used in the evaluation of LLMs with JudgeSmall.\n\n--", "#### Future Work\n\n- Finding a way to prevent the mix up between a 4 point response and a 5 point response.\n- Finding out the proper instructions to increase GPT4's score.\n- Increasing the size of the dataset to create a training set for fine-tuning." ]
[ "TAGS\n#task_categories-question-answering #size_categories-n<1K #language-English #license-mit #arxiv-2401.10020 #region-us \n", "# Can LLMs Become Editors?", "### Dataset Summary\n\nJudge is a new dataset for investigating how LLMs handle judging and writing responses with long term memory, short term memory and key information.\nTo succeed, an LLM needs to make correct evaluations of new responses based on the short, long and key data provided. Along with this test, we \ncan also evaulate how an LLM writes theres new responses as well. The coverage of questions in the dataset includes multiple categories like sports, music, history, gaming and more.", "#### Dataset Size\n\nThis is the small version of the dataset with only 100 questions. Designed to be a low-cost test to find out how current LLMs handle these types\nof problems.", "#### LLM Results\n\n<img alt=\"benchmark\" src=\"small_benchmark.png\">\n\n--", "#### Initial Low Scores Across The Board\n\nDuring the experiments with JudgeSmall it was discovered that LLMs consistantly mixed up 4 point responses and 5 point responses. When taking this into\naccount, scores increased dramatically for all LLMs.", "#### Self Reward Language Models\n(Link: URL\n\nThis paper was the inspiration for the creation of this dataset. The same scoring system used in this paper was used in the evaluation of LLMs with JudgeSmall.\n\n--", "#### Future Work\n\n- Finding a way to prevent the mix up between a 4 point response and a 5 point response.\n- Finding out the proper instructions to increase GPT4's score.\n- Increasing the size of the dataset to create a training set for fine-tuning." ]
[ 46, 10, 115, 43, 27, 61, 50, 62 ]
[ "passage: TAGS\n#task_categories-question-answering #size_categories-n<1K #language-English #license-mit #arxiv-2401.10020 #region-us \n# Can LLMs Become Editors?### Dataset Summary\n\nJudge is a new dataset for investigating how LLMs handle judging and writing responses with long term memory, short term memory and key information.\nTo succeed, an LLM needs to make correct evaluations of new responses based on the short, long and key data provided. Along with this test, we \ncan also evaulate how an LLM writes theres new responses as well. The coverage of questions in the dataset includes multiple categories like sports, music, history, gaming and more.#### Dataset Size\n\nThis is the small version of the dataset with only 100 questions. Designed to be a low-cost test to find out how current LLMs handle these types\nof problems.#### LLM Results\n\n<img alt=\"benchmark\" src=\"small_benchmark.png\">\n\n--#### Initial Low Scores Across The Board\n\nDuring the experiments with JudgeSmall it was discovered that LLMs consistantly mixed up 4 point responses and 5 point responses. When taking this into\naccount, scores increased dramatically for all LLMs.#### Self Reward Language Models\n(Link: URL\n\nThis paper was the inspiration for the creation of this dataset. The same scoring system used in this paper was used in the evaluation of LLMs with JudgeSmall.\n\n--#### Future Work\n\n- Finding a way to prevent the mix up between a 4 point response and a 5 point response.\n- Finding out the proper instructions to increase GPT4's score.\n- Increasing the size of the dataset to create a training set for fine-tuning." ]
[ -0.07660842686891556, 0.028440799564123154, -0.004224028438329697, 0.07877205312252045, 0.07814916223287582, 0.005194096360355616, -0.005197760183364153, 0.13259238004684448, -0.07924512773752213, 0.0938146784901619, 0.006411352194845676, -0.016744278371334076, 0.04729555919766426, 0.1029544547200203, 0.07149432599544525, -0.11475639790296555, 0.002023872919380665, -0.08093708753585815, -0.01447816751897335, 0.0944778099656105, 0.07892885059118271, -0.09451210498809814, 0.05862908810377121, -0.017243025824427605, -0.007600152865052223, 0.020955286920070648, -0.0433364063501358, -0.017909353598952293, 0.027026213705539703, 0.04689246416091919, -0.0501164048910141, -0.016104308888316154, 0.005373985040932894, -0.22876332700252533, 0.027735887095332146, 0.03530711308121681, 0.07610473036766052, -0.011825044639408588, 0.07793824374675751, 0.05449433997273445, 0.11703328043222427, -0.12229324877262115, 0.023621225729584694, 0.07706279307603836, -0.059254735708236694, -0.18147318065166473, -0.18023619055747986, -0.029067503288388252, 0.08351361006498337, 0.11834830790758133, -0.03828137367963791, 0.05976251885294914, -0.06012309715151787, 0.002574864076450467, 0.20144200325012207, -0.15769007802009583, -0.00646792771294713, 0.08761201798915863, 0.03250126168131828, 0.0332457609474659, -0.09489560127258301, 0.042409636080265045, 0.04011271893978119, 0.016594376415014267, 0.014264940284192562, -0.009643176570534706, 0.07288651913404465, 0.07221429795026779, -0.08350256085395813, -0.05282120406627655, 0.11390640586614609, 0.007960681803524494, -0.09387011080980301, -0.2181650847196579, 0.08071944862604141, -0.05124644935131073, -0.023154621943831444, -0.05642957240343094, 0.056070782244205475, -0.024222781881690025, 0.057611241936683655, -0.07173317670822144, -0.1051977127790451, -0.006975045893341303, -0.030385462567210197, 0.020393120124936104, 0.007376641035079956, -0.01634356938302517, -0.05465975031256676, 0.05685236304998398, -0.01643374375998974, 
-0.041381869465112686, -0.14207243919372559, -0.01967606134712696, -0.12170948088169098, -0.016413571313023567, -0.0698990747332573, -0.11618627607822418, 0.052469294518232346, 0.09488484263420105, 0.01141331810504198, 0.02904597856104374, 0.04205538332462311, 0.0059027415700256824, 0.0371382050216198, 0.13353200256824493, -0.060350075364112854, -0.07622789591550827, 0.04652322083711624, 0.029949499294161797, 0.04212803393602371, -0.03859324753284454, -0.004750079475343227, -0.023109950125217438, -0.02276862971484661, 0.09400827437639236, 0.028571318835020065, 0.024502383545041084, -0.05221481993794441, -0.014114892110228539, 0.05523235350847244, -0.15459926426410675, 0.04949376359581947, 0.01595054194331169, -0.04017481952905655, 0.02730870433151722, 0.008385853841900826, -0.014620067551732063, -0.042463045567274094, 0.09453386068344116, -0.031298309564590454, -0.035593174397945404, -0.08113458752632141, -0.03305036202073097, 0.08520689606666565, 0.0960720106959343, -0.06208711117506027, 0.001172429183498025, -0.12442754209041595, -0.0913081094622612, 0.0021286779083311558, -0.03369686007499695, -0.019967349246144295, -0.023033620789647102, -0.06632767617702484, -0.033634308725595474, 0.026745565235614777, 0.09325126558542252, -0.028208469972014427, 0.06569913029670715, -0.016262026503682137, 0.034860871732234955, -0.007693416438996792, 0.03934208303689957, -0.09987186640501022, 0.03142717480659485, -0.19235719740390778, 0.12212245911359787, -0.03783218190073967, -0.05443244427442551, -0.07422375679016113, -0.058757342398166656, 0.010703334584832191, 0.04163745418190956, 0.013357427902519703, 0.11461575329303741, -0.20771577954292297, -0.05169646814465523, 0.02076042629778385, -0.07308655232191086, -0.12005099654197693, 0.07616227865219116, -0.04039405658841133, 0.028950432315468788, 0.09851258248090744, 0.10261301696300507, 0.2100353091955185, -0.05613314360380173, -0.11727724224328995, -0.07933162152767181, -0.0714549571275711, 0.06372243165969849, 
0.0772775262594223, -0.018068363890051842, 0.012028486467897892, 0.058133356273174286, -0.07134983688592911, 0.006421063095331192, 0.011930304579436779, -0.0606735497713089, -0.038439296185970306, -0.015290230512619019, -0.09489886462688446, -0.004137079697102308, -0.030840370804071426, -0.052364323288202286, -0.08500561863183975, -0.012691221199929714, 0.08563609421253204, 0.012513294816017151, 0.059157710522413254, -0.12155070900917053, 0.1378047615289688, -0.026246780529618263, 0.02786196395754814, -0.1620059609413147, -0.03986859321594238, -0.0030868391040712595, -0.0598917230963707, 0.13820165395736694, 0.0037215612828731537, 0.03907274454832077, -0.013924829661846161, -0.022048797458410263, 0.023914840072393417, -0.0389239601790905, -0.05073945224285126, -0.046751540154218674, -0.1347491443157196, -0.017530402168631554, -0.036655060946941376, 0.09882117062807083, -0.11318524926900864, -0.03934049233794212, 0.06780339777469635, 0.018926195800304413, 0.008555513806641102, -0.09565185010433197, 0.02866935357451439, -0.019895430654287338, 0.0014590714126825333, -0.024410659447312355, -0.0020006168633699417, -0.03141710162162781, -0.11600463092327118, 0.03682001307606697, -0.17229405045509338, -0.10016075521707535, 0.07789626717567444, 0.11148690432310104, -0.06188317760825157, -0.08000556379556656, -0.02192380093038082, -0.02985466457903385, -0.08525251597166061, -0.013395876623690128, 0.09785079956054688, 0.06786172837018967, 0.06160406395792961, -0.08940418809652328, -0.06484942138195038, -0.03187531605362892, 0.0420038141310215, 0.009830741211771965, 0.06002988666296005, 0.029274601489305496, -0.07499663531780243, 0.001771067501977086, 0.04417283460497856, 0.0937180370092392, 0.13218390941619873, -0.024295790120959282, -0.11843995004892349, -0.017655737698078156, 0.03980196639895439, 0.0034917406737804413, 0.06435737013816833, 0.09162633866071701, 0.0978354886174202, 0.06103157997131348, 0.004895159974694252, 0.03901657834649086, -0.08165253698825836, 
-0.014865308068692684, 0.06461513787508011, -0.05232435464859009, -0.07176990061998367, 0.09347892552614212, 0.07976517081260681, 0.08241099119186401, 0.02532213367521763, 0.14538034796714783, -0.02261059544980526, -0.07062068581581116, -0.10407088696956635, 0.12733344733715057, -0.07689705491065979, -0.23496460914611816, -0.15644876658916473, 0.0818217545747757, -0.052794866263866425, -0.03125110641121864, 0.038390979170799255, -0.11032876372337341, -0.0941459983587265, -0.12060442566871643, 0.11448349058628082, -0.014520599506795406, -0.033788714557886124, -0.020704884082078934, 0.03267491236329079, -0.005540340673178434, -0.11979233473539352, 0.049938347190618515, 0.0299841295927763, -0.07353752106428146, -0.044267065823078156, 0.0683593899011612, 0.10594051331281662, 0.10174223780632019, -0.003067901125177741, 0.0007696820539422333, -0.040709059685468674, 0.17869402468204498, -0.13503724336624146, 0.04232608899474144, 0.13200339674949646, -0.015158417634665966, 0.032805971801280975, 0.04310191795229912, 0.007037369534373283, -0.06544776260852814, 0.04005257040262222, 0.09695526212453842, -0.07147461920976639, -0.2424100637435913, -0.038116373121738434, -0.05971948057413101, 0.01959148608148098, -0.0020963517017662525, 0.04090086743235588, -0.09361451864242554, 0.023634595796465874, -0.018292004242539406, -0.029906978830695152, 0.05007513612508774, 0.047644227743148804, 0.14741867780685425, -0.0011620742734521627, 0.026552271097898483, -0.112207792699337, -0.0034764932934194803, 0.1576436311006546, -0.04661044850945473, 0.20746615529060364, -0.10247737169265747, 0.07532446831464767, 0.08081909269094467, 0.02265462651848793, -0.05510278791189194, 0.08137402683496475, -0.047943562269210815, 0.006333325989544392, -0.04655063524842262, -0.0629962682723999, 0.006089029833674431, 0.056347671896219254, 0.08495070040225983, 0.022700820118188858, -0.08003351837396622, 0.06575369834899902, 0.04547346383333206, 0.15881292521953583, -0.01186461839824915, 
-0.05403472110629082, -0.021922118961811066, 0.06115299090743065, -0.02182867005467415, -0.06480852514505386, 0.060466792434453964, 0.09192333370447159, -0.11519160866737366, 0.1227719783782959, -0.021903548389673233, 0.07928512245416641, -0.011174607090651989, -0.017106391489505768, 0.04283614084124565, 0.10360986739397049, 0.01838652230799198, 0.13351008296012878, -0.15813195705413818, 0.01648860238492489, 0.024406379088759422, 0.03867526724934578, -0.0967196375131607, 0.017292754724621773, 0.007047968916594982, -0.06896991282701492, 0.12271004170179367, 0.02199634537100792, -0.08479969203472137, -0.04866521805524826, -0.057947926223278046, 0.04543285071849823, 0.11334209144115448, -0.00801224634051323, 0.13730692863464355, -0.06573624163866043, 0.02778196893632412, -0.04463104158639908, 0.006683562882244587, -0.026521513238549232, -0.24294014275074005, 0.04259739816188812, -0.019961154088377953, -0.050413910299539566, -0.0480453297495842, -0.02861214056611061, -0.030840875580906868, 0.20282480120658875, 0.04344736784696579, -0.038629114627838135, -0.12005110085010529, 0.03025386482477188, 0.19517187774181366, -0.04110848158597946, 0.023144669830799103, -0.003922844771295786, 0.11371516436338425, -0.006645618472248316, -0.027734773233532906, 0.029294565320014954, -0.004472447093576193, -0.1710391491651535, 0.04247455298900604, 0.10757090151309967, 0.030357731506228447, 0.047584548592567444, -0.005279229022562504, 0.02657916024327278, 0.028503116220235825, -0.14866359531879425, 0.012302005663514137, 0.06529371440410614, 0.00029822206124663353, 0.07046131044626236, 0.02360493689775467, 0.09886395931243896, -0.04657336324453354, -0.09393496811389923, 0.10541018843650818, 0.2902541756629944, -0.08070147037506104, 0.20659397542476654, -0.09277252107858658, -0.10836632549762726, -0.21389901638031006, 0.016577543690800667, -0.046002257615327835, 0.01994582638144493, 0.1465129852294922, -0.09455908834934235, 0.06786097586154938, -0.012264241464436054, 
-0.003879408584907651, 0.07294343411922455, -0.18273229897022247, -0.12971614301204681, 0.01187227200716734, 0.015509693883359432, 0.0018384694121778011, -0.10604970902204514, -0.03960607200860977, -0.05087914690375328, -0.05689757689833641, 0.018570419400930405, -0.031055349856615067, 0.11065838485956192, -0.04244474694132805, 0.06680499017238617, 0.07516342401504517, -0.04442208260297775, 0.12636280059814453, -0.017658768221735954, 0.09351418167352676, -0.04194619134068489, 0.05927670747041702, -0.006031270604580641, -0.050647735595703125, 0.1448061317205429, -0.024803515523672104, 0.04269177094101906, -0.1670169085264206, -0.028399210423231125, -0.05107731744647026, -0.025144238024950027, -0.05470990389585495, 0.03139181435108185, -0.20805223286151886, 0.06114228442311287, 0.07859211415052414, -0.04304831847548485, 0.015757864341139793, -0.04621734470129013, -0.01155347190797329, 0.005276195704936981, 0.19224241375923157, 0.0539504736661911, -0.12120138853788376, 0.03672610968351364, 0.02528313174843788, 0.009286242537200451, -0.09479812532663345, 0.07276087999343872, 0.11213813722133636, 0.026604749262332916, 0.021592512726783752, 0.02661837264895439, -0.1309143602848053, 0.004918924067169428, 0.08936286717653275, -0.06114258989691734, -0.23209470510482788, 0.026717836037278175, 0.03947478160262108, -0.09055241197347641, -0.09107920527458191, 0.07641170918941498, -0.02101314254105091, -0.034427400678396225, 0.03586343675851822, 0.07407651841640472, -0.0022450715769082308, 0.07903406769037247, 0.0014263951452448964, 0.02579203061759472, -0.08254314959049225, 0.11412815004587173, 0.038906171917915344, -0.12270993739366531, 0.054336708039045334, 0.07019028067588806, -0.02420860342681408, 0.0017453926848247647, 0.11086633056402206, 0.08708591759204865, -0.020912669599056244, -0.0249813050031662, -0.03447301313281059, -0.11118060350418091, 0.09947770833969116, 0.01309946645051241, 0.02089201658964157, 0.02752886340022087, -0.004787413403391838, 0.0212700255215168, 
-0.09549468010663986, 0.09888623654842377, 0.07212145626544952, -0.0021536170970648527, -0.052475109696388245, 0.02485956810414791, -0.024329539388418198, 0.0403122752904892, -0.022307444363832474, -0.0009438281413167715, -0.043880265206098557, 0.01910296268761158, -0.15320166945457458, -0.0017688405932858586, -0.05401860177516937, -0.011685757897794247, 0.004936486482620239, 0.0042105671018362045, -0.012652226723730564, 0.00493008503690362, -0.06444813311100006, -0.059647127985954285, -0.05034932866692543, 0.06382326781749725, -0.17390680313110352, 0.003564992919564247, 0.05883565917611122, -0.04459815099835396, 0.08187432587146759, -0.014131413772702217, -0.03484424203634262, 0.015483255498111248, -0.1253027766942978, -0.06769859790802002, 0.01146992202848196, 0.02959439344704151, -0.010215627029538155, -0.2098930925130844, -0.007030113134533167, 0.03134813904762268, -0.005698190536350012, 0.02945088967680931, 0.0030679977498948574, -0.1028902530670166, 0.01045465562492609, -0.03835785761475563, -0.005915697664022446, -0.03945388272404671, 0.008991393260657787, 0.014104506000876427, 0.05379616841673851, 0.16893506050109863, -0.0329911932349205, 0.06477509438991547, -0.18822148442268372, -0.03221002593636513, -0.030332747846841812, 0.06336941570043564, -0.07328120619058609, -0.03168678656220436, 0.07304324954748154, -0.0008350031566806138, 0.20250378549098969, -0.04039812833070755, -0.05222127586603165, 0.057439640164375305, 0.03797052800655365, 0.06547778844833374, 0.0019315958488732576, 0.013498619198799133, -0.05570126697421074, 0.003761611646041274, 0.13182663917541504, -0.01456624548882246, -0.009284065105021, -0.004276655148714781, 0.19087357819080353, 0.08157526701688766, 0.09929117560386658, 0.04832731932401657, -0.038705162703990936, -0.03802787512540817, -0.01156297791749239, 0.06680280715227127, 0.02056269161403179, 0.015781521797180176, -0.04312245547771454, 0.0924273356795311, 0.13137328624725342, -0.15646925568580627, 0.18551094830036163, 
-0.11299517750740051, -0.04067350551486015, -0.09715382754802704, -0.07980994135141373, -0.0666733905673027, -0.08641776442527771, 0.022954236716032028, -0.12853088974952698, 0.1261032223701477, -0.013381189666688442, 0.05717206001281738, -0.009658413007855415, 0.10874849557876587, -0.16793864965438843, -0.04715359956026077, 0.022891663014888763, -0.02190113067626953, 0.013312218710780144, 0.13467296957969666, 0.11242206394672394, 0.04623645544052124, -0.006143602076917887, 0.08865948021411896, 0.10583986341953278, 0.03459861874580383, 0.02766473963856697, -0.08192476630210876, -0.04213254153728485, -0.0012181876227259636, -0.008176651783287525, 0.09688271582126617, 0.15009446442127228, 0.02617070823907852, -0.03658001124858856, -0.03552357852458954, 0.20257262885570526, -0.05326548591256142, -0.09584134072065353, -0.09216032177209854, 0.038760773837566376, -0.005782815627753735, -0.028695374727249146, -0.006070699542760849, -0.11384887993335724, 0.022874483838677406, 0.23898965120315552, 0.11689011752605438, -0.07722903043031693, -0.010117379948496819, -0.0480494424700737, 0.017368827015161514, -0.004040854051709175, 0.1041887030005455, 0.04800727590918541, 0.2138957679271698, -0.04082709550857544, 0.03721274808049202, -0.09028969705104828, -0.015091503039002419, 0.017986882477998734, 0.2511434853076935, -0.024632781744003296, 0.028007661923766136, -0.03407587110996246, 0.06864055246114731, -0.07516342401504517, -0.2220722883939743, -0.017837028950452805, -0.031530316919088364, -0.12904800474643707, -0.014598139561712742, 0.06858886033296585, -0.025457842275500298, 0.03344263508915901, 0.03198397532105446, 0.01843903958797455, 0.07547427713871002, 0.019654706120491028, -0.13613390922546387, -0.020478416234254837, 0.08658403903245926, -0.010798611678183079, 0.15017424523830414, 0.08318711817264557, 0.07227179408073425, 0.0545758418738842, -0.040730006992816925, -0.16038872301578522, 0.01528683677315712, 0.020268449559807777, -0.04321380704641342, 
0.0384710468351841, 0.1888764351606369, 0.03648409992456436, 0.08347675204277039, 0.08492500334978104, 0.023642228916287422, 0.049751508980989456, -0.016500966623425484, -0.041121706366539, -0.12938451766967773, 0.020665904507040977, -0.11657381802797318, 0.124968022108078, 0.14321471750736237, -0.028879428282380104, 0.06531213968992233, -0.069578155875206, 0.0622386708855629, -0.04630811884999275, 0.1543152928352356, 0.01370984222739935, -0.0757988691329956, 0.024645958095788956, 0.058765631169080734, 0.006024712696671486, -0.25740405917167664, -0.059445571154356, 0.0188223198056221, -0.04835353046655655, -0.000022106891265138984, 0.179454505443573, 0.03139009699225426, -0.01683208718895912, -0.055320821702480316, -0.11512795835733414, 0.04556868597865105, 0.10048875212669373, -0.11598809063434601, -0.02192695252597332 ]
db03b60239685b0c613fd56fdb38d9fe0bc28f33
# Dataset Card for Evaluation run of rombodawg/Everyone-Coder-33b-Base <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [rombodawg/Everyone-Coder-33b-Base](https://huggingface.co/rombodawg/Everyone-Coder-33b-Base) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_rombodawg__Everyone-Coder-33b-Base", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T11:38:57.857579](https://huggingface.co/datasets/open-llm-leaderboard/details_rombodawg__Everyone-Coder-33b-Base/blob/main/results_2024-01-25T11-38-57.857579.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.44306177149148895, "acc_stderr": 0.03479874859582811, "acc_norm": 0.44384185659230296, "acc_norm_stderr": 0.035519562520190985, "mc1": 0.2692778457772338, "mc1_stderr": 0.015528566637087283, "mc2": 0.42262985236898787, "mc2_stderr": 0.014934468412056808 }, "harness|arc:challenge|25": { "acc": 0.42918088737201365, "acc_stderr": 0.014464085894870653, "acc_norm": 0.4598976109215017, "acc_norm_stderr": 0.014564318856924848 }, "harness|hellaswag|10": { "acc": 0.4592710615415256, "acc_stderr": 0.004973199296339967, "acc_norm": 0.6171081457876917, "acc_norm_stderr": 0.00485098821516754 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.34074074074074073, "acc_stderr": 0.04094376269996794, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.04094376269996794 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.46710526315789475, "acc_stderr": 0.04060127035236397, "acc_norm": 0.46710526315789475, "acc_norm_stderr": 0.04060127035236397 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.4377358490566038, "acc_stderr": 0.03053333843046752, "acc_norm": 0.4377358490566038, "acc_norm_stderr": 0.03053333843046752 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.3333333333333333, "acc_stderr": 0.039420826399272135, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.039420826399272135 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956911, "acc_norm": 0.51, 
"acc_norm_stderr": 0.05024183937956911 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3988439306358382, "acc_stderr": 0.03733626655383509, "acc_norm": 0.3988439306358382, "acc_norm_stderr": 0.03733626655383509 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3137254901960784, "acc_stderr": 0.04617034827006718, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.04617034827006718 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.66, "acc_stderr": 0.047609522856952365, "acc_norm": 0.66, "acc_norm_stderr": 0.047609522856952365 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.37872340425531914, "acc_stderr": 0.03170995606040655, "acc_norm": 0.37872340425531914, "acc_norm_stderr": 0.03170995606040655 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3508771929824561, "acc_stderr": 0.044895393502707, "acc_norm": 0.3508771929824561, "acc_norm_stderr": 0.044895393502707 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5172413793103449, "acc_stderr": 0.04164188720169375, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.04164188720169375 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41798941798941797, "acc_stderr": 0.02540255550326091, "acc_norm": 0.41798941798941797, "acc_norm_stderr": 0.02540255550326091 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42857142857142855, "acc_stderr": 0.0442626668137991, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.0442626668137991 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.2, "acc_stderr": 0.04020151261036843, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036843 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.45806451612903226, "acc_stderr": 0.028343787250540636, "acc_norm": 0.45806451612903226, "acc_norm_stderr": 0.028343787250540636 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3251231527093596, "acc_stderr": 0.032957975663112704, "acc_norm": 0.3251231527093596, "acc_norm_stderr": 0.032957975663112704 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.5515151515151515, "acc_stderr": 0.038835659779569286, "acc_norm": 0.5515151515151515, "acc_norm_stderr": 0.038835659779569286 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.494949494949495, "acc_stderr": 0.035621707606254015, "acc_norm": 0.494949494949495, "acc_norm_stderr": 0.035621707606254015 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.44559585492227977, "acc_stderr": 0.035870149860756595, "acc_norm": 0.44559585492227977, "acc_norm_stderr": 0.035870149860756595 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.382051282051282, "acc_stderr": 0.02463554916390823, "acc_norm": 0.382051282051282, "acc_norm_stderr": 0.02463554916390823 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.337037037037037, "acc_stderr": 0.028820884666253252, "acc_norm": 0.337037037037037, "acc_norm_stderr": 0.028820884666253252 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.39915966386554624, "acc_stderr": 0.03181110032413925, "acc_norm": 0.39915966386554624, "acc_norm_stderr": 0.03181110032413925 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2980132450331126, "acc_stderr": 0.037345356767871984, "acc_norm": 0.2980132450331126, "acc_norm_stderr": 0.037345356767871984 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.5302752293577981, "acc_stderr": 0.02139798860493697, "acc_norm": 0.5302752293577981, "acc_norm_stderr": 0.02139798860493697 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4027777777777778, "acc_stderr": 
0.033448873829978666, "acc_norm": 0.4027777777777778, "acc_norm_stderr": 0.033448873829978666 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.44607843137254904, "acc_stderr": 0.03488845451304974, "acc_norm": 0.44607843137254904, "acc_norm_stderr": 0.03488845451304974 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.5147679324894515, "acc_stderr": 0.032533028078777386, "acc_norm": 0.5147679324894515, "acc_norm_stderr": 0.032533028078777386 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.49327354260089684, "acc_stderr": 0.03355476596234354, "acc_norm": 0.49327354260089684, "acc_norm_stderr": 0.03355476596234354 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5190839694656488, "acc_stderr": 0.043820947055509867, "acc_norm": 0.5190839694656488, "acc_norm_stderr": 0.043820947055509867 }, "harness|hendrycksTest-international_law|5": { "acc": 0.5454545454545454, "acc_stderr": 0.04545454545454548, "acc_norm": 0.5454545454545454, "acc_norm_stderr": 0.04545454545454548 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.46296296296296297, "acc_stderr": 0.04820403072760627, "acc_norm": 0.46296296296296297, "acc_norm_stderr": 0.04820403072760627 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.44171779141104295, "acc_stderr": 0.039015918258361836, "acc_norm": 0.44171779141104295, "acc_norm_stderr": 0.039015918258361836 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3482142857142857, "acc_stderr": 0.04521829902833585, "acc_norm": 0.3482142857142857, "acc_norm_stderr": 0.04521829902833585 }, "harness|hendrycksTest-management|5": { "acc": 0.6019417475728155, "acc_stderr": 0.04846748253977239, "acc_norm": 0.6019417475728155, "acc_norm_stderr": 0.04846748253977239 }, "harness|hendrycksTest-marketing|5": { "acc": 0.6965811965811965, "acc_stderr": 0.030118210106942662, "acc_norm": 0.6965811965811965, "acc_norm_stderr": 0.030118210106942662 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.45, 
"acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.4789272030651341, "acc_stderr": 0.017864076786212903, "acc_norm": 0.4789272030651341, "acc_norm_stderr": 0.017864076786212903 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.45375722543352603, "acc_stderr": 0.026803720583206177, "acc_norm": 0.45375722543352603, "acc_norm_stderr": 0.026803720583206177 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3418994413407821, "acc_stderr": 0.015864506461604633, "acc_norm": 0.3418994413407821, "acc_norm_stderr": 0.015864506461604633 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.46405228758169936, "acc_stderr": 0.028555827516528784, "acc_norm": 0.46405228758169936, "acc_norm_stderr": 0.028555827516528784 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.42765273311897106, "acc_stderr": 0.02809924077580956, "acc_norm": 0.42765273311897106, "acc_norm_stderr": 0.02809924077580956 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.4166666666666667, "acc_stderr": 0.027431623722415005, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.027431623722415005 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.3546099290780142, "acc_stderr": 0.028538650028878634, "acc_norm": 0.3546099290780142, "acc_norm_stderr": 0.028538650028878634 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.32529335071707954, "acc_stderr": 0.01196531153657153, "acc_norm": 0.32529335071707954, "acc_norm_stderr": 0.01196531153657153 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4264705882352941, "acc_stderr": 0.030042615832714874, "acc_norm": 0.4264705882352941, "acc_norm_stderr": 0.030042615832714874 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.36764705882352944, "acc_stderr": 0.019506291693954854, "acc_norm": 0.36764705882352944, "acc_norm_stderr": 0.019506291693954854 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.509090909090909, "acc_stderr": 
0.0478833976870286, "acc_norm": 0.509090909090909, "acc_norm_stderr": 0.0478833976870286 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5591836734693878, "acc_stderr": 0.03178419114175363, "acc_norm": 0.5591836734693878, "acc_norm_stderr": 0.03178419114175363 }, "harness|hendrycksTest-sociology|5": { "acc": 0.5223880597014925, "acc_stderr": 0.03531987930208731, "acc_norm": 0.5223880597014925, "acc_norm_stderr": 0.03531987930208731 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.59, "acc_stderr": 0.04943110704237101, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237101 }, "harness|hendrycksTest-virology|5": { "acc": 0.3614457831325301, "acc_stderr": 0.0374005938202932, "acc_norm": 0.3614457831325301, "acc_norm_stderr": 0.0374005938202932 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.42105263157894735, "acc_stderr": 0.03786720706234214, "acc_norm": 0.42105263157894735, "acc_norm_stderr": 0.03786720706234214 }, "harness|truthfulqa:mc|0": { "mc1": 0.2692778457772338, "mc1_stderr": 0.015528566637087283, "mc2": 0.42262985236898787, "mc2_stderr": 0.014934468412056808 }, "harness|winogrande|5": { "acc": 0.6306235201262825, "acc_stderr": 0.013564470596053523 }, "harness|gsm8k|5": { "acc": 0.3980288097043215, "acc_stderr": 0.013483026939074817 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_rombodawg__Everyone-Coder-33b-Base
[ "region:us" ]
2024-01-25T11:41:17+00:00
{"pretty_name": "Evaluation run of rombodawg/Everyone-Coder-33b-Base", "dataset_summary": "Dataset automatically created during the evaluation run of model [rombodawg/Everyone-Coder-33b-Base](https://huggingface.co/rombodawg/Everyone-Coder-33b-Base) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_rombodawg__Everyone-Coder-33b-Base\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T11:38:57.857579](https://huggingface.co/datasets/open-llm-leaderboard/details_rombodawg__Everyone-Coder-33b-Base/blob/main/results_2024-01-25T11-38-57.857579.json)(note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.44306177149148895,\n \"acc_stderr\": 0.03479874859582811,\n \"acc_norm\": 0.44384185659230296,\n \"acc_norm_stderr\": 0.035519562520190985,\n \"mc1\": 0.2692778457772338,\n \"mc1_stderr\": 0.015528566637087283,\n \"mc2\": 0.42262985236898787,\n \"mc2_stderr\": 0.014934468412056808\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.42918088737201365,\n \"acc_stderr\": 0.014464085894870653,\n \"acc_norm\": 0.4598976109215017,\n \"acc_norm_stderr\": 0.014564318856924848\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4592710615415256,\n \"acc_stderr\": 0.004973199296339967,\n \"acc_norm\": 0.6171081457876917,\n \"acc_norm_stderr\": 0.00485098821516754\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.34074074074074073,\n \"acc_stderr\": 0.04094376269996794,\n \"acc_norm\": 0.34074074074074073,\n \"acc_norm_stderr\": 0.04094376269996794\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.46710526315789475,\n \"acc_stderr\": 0.04060127035236397,\n \"acc_norm\": 0.46710526315789475,\n \"acc_norm_stderr\": 0.04060127035236397\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.4377358490566038,\n \"acc_stderr\": 0.03053333843046752,\n \"acc_norm\": 0.4377358490566038,\n \"acc_norm_stderr\": 0.03053333843046752\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.039420826399272135,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.039420826399272135\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 
0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3988439306358382,\n \"acc_stderr\": 0.03733626655383509,\n \"acc_norm\": 0.3988439306358382,\n \"acc_norm_stderr\": 0.03733626655383509\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3137254901960784,\n \"acc_stderr\": 0.04617034827006718,\n \"acc_norm\": 0.3137254901960784,\n \"acc_norm_stderr\": 0.04617034827006718\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.047609522856952365,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.047609522856952365\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.37872340425531914,\n \"acc_stderr\": 0.03170995606040655,\n \"acc_norm\": 0.37872340425531914,\n \"acc_norm_stderr\": 0.03170995606040655\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3508771929824561,\n \"acc_stderr\": 0.044895393502707,\n \"acc_norm\": 0.3508771929824561,\n \"acc_norm_stderr\": 0.044895393502707\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.04164188720169375,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.04164188720169375\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41798941798941797,\n \"acc_stderr\": 0.02540255550326091,\n \"acc_norm\": 0.41798941798941797,\n \"acc_norm_stderr\": 0.02540255550326091\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 
0.0442626668137991,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.0442626668137991\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036843,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036843\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.45806451612903226,\n \"acc_stderr\": 0.028343787250540636,\n \"acc_norm\": 0.45806451612903226,\n \"acc_norm_stderr\": 0.028343787250540636\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.3251231527093596,\n \"acc_stderr\": 0.032957975663112704,\n \"acc_norm\": 0.3251231527093596,\n \"acc_norm_stderr\": 0.032957975663112704\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.5515151515151515,\n \"acc_stderr\": 0.038835659779569286,\n \"acc_norm\": 0.5515151515151515,\n \"acc_norm_stderr\": 0.038835659779569286\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.494949494949495,\n \"acc_stderr\": 0.035621707606254015,\n \"acc_norm\": 0.494949494949495,\n \"acc_norm_stderr\": 0.035621707606254015\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.44559585492227977,\n \"acc_stderr\": 0.035870149860756595,\n \"acc_norm\": 0.44559585492227977,\n \"acc_norm_stderr\": 0.035870149860756595\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.382051282051282,\n \"acc_stderr\": 0.02463554916390823,\n \"acc_norm\": 0.382051282051282,\n \"acc_norm_stderr\": 0.02463554916390823\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.337037037037037,\n \"acc_stderr\": 0.028820884666253252,\n \"acc_norm\": 0.337037037037037,\n \"acc_norm_stderr\": 0.028820884666253252\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.39915966386554624,\n \"acc_stderr\": 0.03181110032413925,\n \"acc_norm\": 0.39915966386554624,\n \"acc_norm_stderr\": 0.03181110032413925\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2980132450331126,\n \"acc_stderr\": 0.037345356767871984,\n \"acc_norm\": 0.2980132450331126,\n \"acc_norm_stderr\": 0.037345356767871984\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.5302752293577981,\n \"acc_stderr\": 0.02139798860493697,\n \"acc_norm\": 0.5302752293577981,\n \"acc_norm_stderr\": 0.02139798860493697\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4027777777777778,\n \"acc_stderr\": 0.033448873829978666,\n \"acc_norm\": 0.4027777777777778,\n \"acc_norm_stderr\": 0.033448873829978666\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.44607843137254904,\n \"acc_stderr\": 0.03488845451304974,\n \"acc_norm\": 0.44607843137254904,\n \"acc_norm_stderr\": 0.03488845451304974\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.5147679324894515,\n \"acc_stderr\": 0.032533028078777386,\n \"acc_norm\": 0.5147679324894515,\n \"acc_norm_stderr\": 0.032533028078777386\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.49327354260089684,\n \"acc_stderr\": 0.03355476596234354,\n \"acc_norm\": 0.49327354260089684,\n \"acc_norm_stderr\": 0.03355476596234354\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.5190839694656488,\n \"acc_stderr\": 0.043820947055509867,\n \"acc_norm\": 0.5190839694656488,\n \"acc_norm_stderr\": 0.043820947055509867\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.5454545454545454,\n \"acc_stderr\": 0.04545454545454548,\n \"acc_norm\": 0.5454545454545454,\n \"acc_norm_stderr\": 0.04545454545454548\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.46296296296296297,\n \"acc_stderr\": 
0.04820403072760627,\n \"acc_norm\": 0.46296296296296297,\n \"acc_norm_stderr\": 0.04820403072760627\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.44171779141104295,\n \"acc_stderr\": 0.039015918258361836,\n \"acc_norm\": 0.44171779141104295,\n \"acc_norm_stderr\": 0.039015918258361836\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3482142857142857,\n \"acc_stderr\": 0.04521829902833585,\n \"acc_norm\": 0.3482142857142857,\n \"acc_norm_stderr\": 0.04521829902833585\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6019417475728155,\n \"acc_stderr\": 0.04846748253977239,\n \"acc_norm\": 0.6019417475728155,\n \"acc_norm_stderr\": 0.04846748253977239\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.6965811965811965,\n \"acc_stderr\": 0.030118210106942662,\n \"acc_norm\": 0.6965811965811965,\n \"acc_norm_stderr\": 0.030118210106942662\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.4789272030651341,\n \"acc_stderr\": 0.017864076786212903,\n \"acc_norm\": 0.4789272030651341,\n \"acc_norm_stderr\": 0.017864076786212903\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.45375722543352603,\n \"acc_stderr\": 0.026803720583206177,\n \"acc_norm\": 0.45375722543352603,\n \"acc_norm_stderr\": 0.026803720583206177\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3418994413407821,\n \"acc_stderr\": 0.015864506461604633,\n \"acc_norm\": 0.3418994413407821,\n \"acc_norm_stderr\": 0.015864506461604633\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.46405228758169936,\n \"acc_stderr\": 0.028555827516528784,\n \"acc_norm\": 0.46405228758169936,\n \"acc_norm_stderr\": 0.028555827516528784\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.42765273311897106,\n \"acc_stderr\": 0.02809924077580956,\n 
\"acc_norm\": 0.42765273311897106,\n \"acc_norm_stderr\": 0.02809924077580956\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.4166666666666667,\n \"acc_stderr\": 0.027431623722415005,\n \"acc_norm\": 0.4166666666666667,\n \"acc_norm_stderr\": 0.027431623722415005\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.3546099290780142,\n \"acc_stderr\": 0.028538650028878634,\n \"acc_norm\": 0.3546099290780142,\n \"acc_norm_stderr\": 0.028538650028878634\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.32529335071707954,\n \"acc_stderr\": 0.01196531153657153,\n \"acc_norm\": 0.32529335071707954,\n \"acc_norm_stderr\": 0.01196531153657153\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4264705882352941,\n \"acc_stderr\": 0.030042615832714874,\n \"acc_norm\": 0.4264705882352941,\n \"acc_norm_stderr\": 0.030042615832714874\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.36764705882352944,\n \"acc_stderr\": 0.019506291693954854,\n \"acc_norm\": 0.36764705882352944,\n \"acc_norm_stderr\": 0.019506291693954854\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.509090909090909,\n \"acc_stderr\": 0.0478833976870286,\n \"acc_norm\": 0.509090909090909,\n \"acc_norm_stderr\": 0.0478833976870286\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.5591836734693878,\n \"acc_stderr\": 0.03178419114175363,\n \"acc_norm\": 0.5591836734693878,\n \"acc_norm_stderr\": 0.03178419114175363\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.5223880597014925,\n \"acc_stderr\": 0.03531987930208731,\n \"acc_norm\": 0.5223880597014925,\n \"acc_norm_stderr\": 0.03531987930208731\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237101,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.04943110704237101\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3614457831325301,\n \"acc_stderr\": 
0.0374005938202932,\n \"acc_norm\": 0.3614457831325301,\n \"acc_norm_stderr\": 0.0374005938202932\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.42105263157894735,\n \"acc_stderr\": 0.03786720706234214,\n \"acc_norm\": 0.42105263157894735,\n \"acc_norm_stderr\": 0.03786720706234214\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2692778457772338,\n \"mc1_stderr\": 0.015528566637087283,\n \"mc2\": 0.42262985236898787,\n \"mc2_stderr\": 0.014934468412056808\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6306235201262825,\n \"acc_stderr\": 0.013564470596053523\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3980288097043215,\n \"acc_stderr\": 0.013483026939074817\n }\n}\n```", "repo_url": "https://huggingface.co/rombodawg/Everyone-Coder-33b-Base", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|arc:challenge|25_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|gsm8k|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hellaswag|10_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T11-38-57.857579.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T11-38-57.857579.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T11-38-57.857579.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T11-38-57.857579.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T11-38-57.857579.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T11-38-57.857579.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T11-38-57.857579.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": 
"2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": 
"2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T11-38-57.857579.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["**/details_harness|winogrande|5_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T11-38-57.857579.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T11_38_57.857579", "path": ["results_2024-01-25T11-38-57.857579.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T11-38-57.857579.parquet"]}]}]}
2024-01-25T11:41:49+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of rombodawg/Everyone-Coder-33b-Base Dataset automatically created during the evaluation run of model rombodawg/Everyone-Coder-33b-Base on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T11:38:57.857579(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of rombodawg/Everyone-Coder-33b-Base\n\n\n\nDataset automatically created during the evaluation run of model rombodawg/Everyone-Coder-33b-Base on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T11:38:57.857579(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of rombodawg/Everyone-Coder-33b-Base\n\n\n\nDataset automatically created during the evaluation run of model rombodawg/Everyone-Coder-33b-Base on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T11:38:57.857579(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 191, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of rombodawg/Everyone-Coder-33b-Base\n\n\n\nDataset automatically created during the evaluation run of model rombodawg/Everyone-Coder-33b-Base on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T11:38:57.857579(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
[ -0.04611409455537796, 0.2169811725616455, -0.004157989285886288, 0.03926503285765648, 0.08761829137802124, -0.009388796985149384, 0.009293071925640106, 0.12003183364868164, -0.020942194387316704, 0.18414342403411865, -0.022908108308911324, 0.09711680561304092, 0.07895965874195099, 0.11725287139415741, 0.01893480308353901, -0.12716558575630188, 0.016895828768610954, -0.07088623195886612, 0.08338092267513275, 0.08237504959106445, 0.08157213777303696, -0.07906577736139297, 0.058984555304050446, -0.046634428203105927, -0.012347247451543808, -0.0035884056705981493, -0.09571796655654907, -0.04316205531358719, 0.08613096922636032, 0.09074588865041733, 0.042313750833272934, -0.012725788168609142, 0.01903894543647766, -0.260379433631897, 0.012361425906419754, 0.09473191946744919, -0.008418820798397064, 0.04247269034385681, 0.11983628571033478, -0.07906495779752731, 0.05759471282362938, -0.05979572609066963, 0.06673695147037506, 0.05301782861351967, -0.1273692101240158, -0.13906016945838928, -0.14122945070266724, 0.00790311023592949, 0.06971242278814316, 0.04495041072368622, -0.02603304572403431, 0.15662692487239838, -0.028107084333896637, 0.04902537539601326, 0.13848374783992767, -0.11326415091753006, -0.023344583809375763, 0.07871898263692856, 0.04304926469922066, 0.06772962212562561, -0.09085197001695633, -0.009773020632565022, 0.04224814847111702, 0.05012703314423561, 0.021564429625868797, 0.007158723659813404, -0.016461290419101715, 0.013381382450461388, -0.1473866105079651, -0.12628191709518433, 0.184977725148201, 0.014669929631054401, -0.038965120911598206, -0.17837688326835632, -0.028557080775499344, -0.004552025813609362, -0.0033145048655569553, -0.04358873888850212, 0.004012217279523611, -0.01193385012447834, 0.0812854990363121, -0.007685582619160414, -0.09584569185972214, -0.021501222625374794, 0.012085603550076485, 0.046583570539951324, 0.01668359711766243, -0.018907684832811356, -0.003881918964907527, 0.11559288203716278, -0.01703629642724991, 
-0.08938025683164597, -0.07058694213628769, -0.0548069067299366, -0.08985288441181183, -0.04460941255092621, 0.017607837915420532, -0.060216620564460754, 0.04468151181936264, 0.23288209736347198, -0.018396463245153427, 0.034542329609394073, -0.11616266518831253, 0.013859125785529613, 0.12359773367643356, 0.053778789937496185, -0.06223316863179207, -0.0720483586192131, -0.014539847150444984, 0.02379581891000271, 0.03909363970160484, -0.020970771089196205, 0.014788037165999413, 0.07185254245996475, 0.05335162580013275, 0.11391590535640717, 0.13084958493709564, 0.03209499269723892, -0.07327420264482498, -0.02432962693274021, 0.2334495633840561, -0.1436881422996521, -0.012880525551736355, 0.007309418637305498, -0.03484918177127838, -0.1261574774980545, 0.09540867805480957, 0.006652671843767166, -0.04926176741719246, 0.10622440278530121, -0.047994568943977356, -0.08658934384584427, -0.07537995278835297, -0.051983486860990524, 0.06314664334058762, -0.0032523954287171364, -0.03517594560980797, -0.08008456975221634, -0.06938965618610382, -0.08346659690141678, 0.018179474398493767, -0.06385969370603561, -0.025857850909233093, 0.028174668550491333, -0.020622970536351204, -0.018011750653386116, -0.011505971662700176, 0.09080858528614044, -0.055635690689086914, 0.03406393900513649, 0.027213415130972862, 0.015157390385866165, 0.0797300636768341, 0.05173397436738014, -0.1270333230495453, 0.09019012749195099, -0.13826707005500793, 0.09193005412817001, -0.11501577496528625, -0.012470345012843609, -0.14291328191757202, -0.01378400344401598, -0.03056093491613865, 0.01650496944785118, -0.005280321929603815, 0.09579883515834808, -0.20948268473148346, 0.015790728852152824, 0.14926402270793915, -0.10625186562538147, -0.11407838761806488, 0.06562530994415283, -0.04571381211280823, 0.07786549627780914, 0.046441949903964996, 0.10198413580656052, 0.11660539358854294, -0.05653063580393791, -0.11777729541063309, -0.0800151601433754, -0.029779426753520966, 0.14618146419525146, 
0.0697464570403099, -0.0749887079000473, 0.12132230401039124, 0.041237834841012955, -0.006117614917457104, -0.10425931960344315, -0.012302559800446033, -0.0631323754787445, -0.024358442053198814, -0.05019034445285797, -0.08088645339012146, 0.0020618790294975042, -0.08789476007223129, -0.02344505675137043, -0.09314883500337601, 0.0065271384082734585, 0.08896668255329132, -0.027071664109826088, 0.015227754600346088, -0.06706297397613525, 0.046469464898109436, -0.0006367948953993618, 0.02329264022409916, -0.2146480232477188, -0.10183224081993103, 0.033703744411468506, -0.12626832723617554, 0.04552348330616951, 0.03264864161610603, 0.010984248481690884, 0.04027080163359642, -0.010957523249089718, 0.019127478823065758, 0.01261674240231514, -0.0010529288556426764, -0.017377715557813644, -0.14241865277290344, -0.06540553271770477, -0.08348798751831055, 0.08943011611700058, -0.13131533563137054, -0.011955685913562775, 0.08253943920135498, 0.17223957180976868, 0.0029873144812881947, -0.0855904296040535, 0.07600764185190201, 0.0018073336686939, -0.0449950285255909, -0.05797526612877846, 0.011576015502214432, -0.02343093603849411, 0.03263410925865173, 0.015354358591139317, -0.21396441757678986, -0.14727294445037842, 0.08054844290018082, 0.11091455072164536, -0.06727027148008347, -0.08169389516115189, -0.066187284886837, -0.06611863523721695, -0.07237028330564499, -0.07976984977722168, 0.06326860934495926, 0.07253146916627884, 0.043069396167993546, -0.06839773058891296, -0.06347519159317017, 0.01760135404765606, 0.048682115972042084, -0.07255340367555618, 0.10197628289461136, 0.0726795569062233, -0.08382634073495865, 0.10476341098546982, 0.0004115132614970207, 0.13143347203731537, 0.060287199914455414, 0.028229430317878723, -0.08970175683498383, -0.0033540199510753155, 0.05806812271475792, 0.043737396597862244, 0.05872117727994919, -0.06021488085389137, 0.02853768691420555, 0.07684168219566345, -0.00025038860621862113, 0.04506589099764824, -0.05395951494574547, 
0.03419819846749306, 0.04441544786095619, 0.008555098436772823, 0.034641992300748825, 0.004577173851430416, -0.005890968255698681, 0.06639233976602554, 0.03560439124703407, 0.0893997773528099, -0.011517348699271679, -0.048356592655181885, -0.09605444967746735, 0.12944424152374268, -0.07966656982898712, -0.2713238298892975, -0.1588640809059143, -0.03677307441830635, -0.04387463629245758, -0.00924934446811676, 0.07426430284976959, -0.008019998669624329, -0.09188883006572723, -0.10174602270126343, 0.04310833662748337, -0.002026822417974472, -0.1291276216506958, -0.03757834434509277, 0.06193017587065697, 0.011706158518791199, -0.17350614070892334, 0.03742467239499092, 0.0582132488489151, -0.05714816600084305, 0.0024882222060114145, 0.08272422105073929, 0.1458902806043625, 0.077475406229496, 0.05801686272025108, -0.028619881719350815, -0.005404672585427761, 0.18310919404029846, -0.10285380482673645, 0.02922125905752182, 0.12416327744722366, -0.05516055226325989, 0.0703505128622055, 0.16041766107082367, 0.01068610418587923, -0.10886083543300629, 0.05458869785070419, 0.08891867101192474, -0.07431846857070923, -0.2385757863521576, -0.10637333989143372, -0.016639338806271553, 0.011358985677361488, 0.10164235532283783, 0.05767478421330452, 0.0032520948443561792, 0.015104269608855247, -0.11530345678329468, -0.03033459186553955, -0.0585552453994751, 0.08642711490392685, 0.05031520500779152, -0.014819303527474403, 0.05209975317120552, -0.03954324126243591, 0.020630542188882828, 0.11257146298885345, 0.032735276967287064, 0.13135866820812225, -0.030010119080543518, 0.17198650538921356, 0.08912380039691925, 0.1130102276802063, -0.04548964276909828, 0.04560320824384689, 0.003669004887342453, 0.07091249525547028, -0.012743539176881313, -0.10370460152626038, -0.06119409576058388, 0.09253330528736115, 0.0024292543530464172, -0.07315263897180557, 0.020306549966335297, -0.06329870969057083, 0.03829188272356987, 0.18999409675598145, -0.019380776211619377, -0.15383875370025635, 
-0.05334380641579628, 0.05962280556559563, -0.020974691957235336, -0.09062818437814713, -0.024277174845337868, 0.053693000227212906, -0.13942694664001465, 0.026325218379497528, -0.025535758584737778, 0.0840114951133728, -0.12958556413650513, -0.015118374489247799, -0.021786270663142204, 0.0385587215423584, 0.0027264237869530916, 0.12267401069402695, -0.12561102211475372, 0.10532380640506744, 0.006622795481234789, 0.010169610381126404, -0.11173158884048462, 0.050610821694135666, -0.055872075259685516, -0.048600874841213226, 0.14039315283298492, -0.01935531757771969, -0.08939768373966217, -0.06467464566230774, -0.1153712198138237, -0.0031647055875509977, 0.08330225199460983, -0.14166978001594543, 0.11605667322874069, 0.030943773686885834, -0.013751274906098843, -0.018084794282913208, -0.0030902985017746687, -0.1175321415066719, -0.2190205603837967, 0.11085925251245499, -0.09924211353063583, 0.06703893840312958, -0.05280718207359314, -0.03575586900115013, -0.04557260870933533, 0.18587055802345276, -0.0856611356139183, -0.04567662999033928, -0.11573668569326401, 0.037703435868024826, 0.17075228691101074, -0.04464246332645416, 0.05105745047330856, -0.04987076297402382, 0.17486295104026794, 0.002609599381685257, -0.047858208417892456, 0.00030219455948099494, -0.09046822041273117, -0.1698383241891861, -0.04202030971646309, 0.14352402091026306, 0.05691404640674591, 0.010688690468668938, 0.007533997762948275, 0.04502066969871521, 0.024022458121180534, -0.09136272966861725, 0.03732915222644806, 0.10194569081068039, 0.12840713560581207, 0.03062089905142784, -0.03387841954827309, -0.07871859520673752, -0.10094305127859116, -0.09824755787849426, 0.06375332921743393, 0.15569880604743958, -0.06563935428857803, 0.1482744663953781, 0.125480055809021, -0.10496453940868378, -0.1979219913482666, -0.05795734375715256, 0.0221022367477417, -0.026998508721590042, 0.12159016728401184, -0.18564142286777496, 0.08303322643041611, 0.05757397040724754, -0.015591969713568687, 0.0737793892621994, 
-0.2375209778547287, -0.12996786832809448, 0.019988909363746643, 0.02374582551419735, -0.22501049935817719, -0.18095912039279938, -0.10859295725822449, -0.038283463567495346, -0.1629735678434372, 0.12209470570087433, -0.024958457797765732, 0.031108062714338303, -0.013424457050859928, 0.052849892526865005, 0.05715873837471008, -0.06581980735063553, 0.13166408240795135, -0.0018969965167343616, 0.01669306308031082, -0.11308130621910095, -0.010461082682013512, 0.02434350736439228, -0.04773866385221481, 0.09982182830572128, 0.03222696855664253, 0.059611838310956955, -0.09231168031692505, -0.03360909968614578, -0.052892349660396576, 0.048016250133514404, -0.07345616817474365, -0.05240517482161522, -0.06954266875982285, 0.07181891798973083, 0.0809994712471962, -0.01553450059145689, 0.04042477160692215, -0.032017890363931656, 0.04869583621621132, 0.24510516226291656, 0.11921748518943787, 0.04591870680451393, -0.11619572341442108, -0.02243408001959324, -0.010365179739892483, -0.008309029042720795, -0.14363881945610046, 0.043129872530698776, 0.08809814602136612, 0.045866020023822784, 0.071782186627388, -0.022811420261859894, -0.19491057097911835, -0.0035942811518907547, 0.09134465456008911, -0.11238221824169159, -0.21526622772216797, 0.029008660465478897, 0.13751627504825592, -0.1675294190645218, -0.05101083964109421, 0.09863071888685226, 0.018414385616779327, -0.03293320909142494, -0.001531684771180153, 0.08317606151103973, 0.0388033427298069, 0.09775734692811966, 0.00398658774793148, 0.05147632956504822, -0.07238295674324036, 0.10906466096639633, 0.15350745618343353, -0.1198643371462822, 0.03181272745132446, 0.046216730028390884, -0.05329921096563339, -0.06729148328304291, 0.030460258945822716, 0.01721242256462574, 0.011192284524440765, -0.03805455565452576, 0.018272249028086662, 0.00622580386698246, 0.03790050745010376, 0.12240538746118546, 0.011539432220160961, 0.04566871002316475, 0.03716494143009186, -0.0019303896697238088, -0.08511728048324585, 0.08683661371469498, 
0.03076917491853237, 0.04316171258687973, -0.04281469061970711, 0.03099886141717434, 0.020856786519289017, -0.011751963756978512, 0.01211666688323021, -0.03706653416156769, -0.061345357447862625, -0.00313123082742095, -0.14553536474704742, 0.029160283505916595, -0.07067906856536865, -0.0025437488220632076, -0.008588231168687344, -0.016575003042817116, -0.015673687681555748, 0.01005625445395708, -0.05680391937494278, -0.06840357184410095, -0.04922270029783249, 0.12882843613624573, -0.2033374309539795, -0.003093936014920473, 0.09606993198394775, -0.06000330299139023, 0.074582040309906, -0.0069105615839362144, -0.013250512070953846, 0.01179297175258398, -0.06788523495197296, -0.0014794953167438507, -0.0145333306863904, 0.041195642203092575, 0.009294797666370869, -0.15406405925750732, -0.014660446904599667, 0.007127104792743921, -0.08949221670627594, -0.005649177823215723, 0.0663798451423645, -0.16484913229942322, 0.006002552341669798, 0.06059093773365021, -0.04012313485145569, -0.04597027972340584, 0.042323242872953415, 0.05971378833055496, 0.010328251868486404, 0.10060034692287445, -0.0014462705003097653, 0.042696695774793625, -0.16562245786190033, -0.05136065185070038, -0.0075347088277339935, 0.0015015695244073868, 0.03150670975446701, 0.029655013233423233, 0.03769000992178917, -0.009153762832283974, 0.20347927510738373, -0.005146020092070103, 0.08229198306798935, 0.031703922897577286, -0.001133951940573752, -0.03534061461687088, 0.025577323511242867, 0.03447870910167694, 0.010625334456562996, 0.024457525461912155, 0.035345256328582764, -0.01060369424521923, -0.043272506445646286, -0.052913349121809006, 0.044732943177223206, 0.15961767733097076, 0.15643908083438873, -0.05237440764904022, 0.08544836938381195, -0.16565608978271484, -0.05501897260546684, 0.034773800522089005, -0.017297685146331787, 0.048532892018556595, -0.07662217319011688, 0.023923031985759735, 0.06361815333366394, -0.09313654154539108, 0.13794216513633728, -0.0834447517991066, -0.03555837273597717, 
-0.03281993418931961, -0.13421809673309326, -0.048262495547533035, 0.0029021278023719788, 0.010629470460116863, -0.10520777851343155, 0.09814951568841934, 0.12898747622966766, -0.025502687320113182, -0.015133959241211414, 0.10448454320430756, -0.07499533146619797, -0.06522668898105621, -0.036680467426776886, 0.004238756373524666, 0.014774952083826065, 0.0008644827175885439, 0.08129516243934631, 0.020499136298894882, 0.08719521015882492, 0.07103640586137772, 0.09684418886899948, 0.04620909318327904, 0.013560712337493896, -0.053820911794900894, -0.07682565599679947, -0.005838198587298393, -0.008208035491406918, -0.04574287310242653, 0.20281338691711426, 0.04497929662466049, 0.017024563625454903, 0.006270238664001226, 0.2158460170030594, 0.002286595990881324, -0.04563624784350395, -0.13186194002628326, 0.10490632802248001, -0.005449374672025442, 0.01663190685212612, 0.019833756610751152, -0.14029881358146667, 0.029413772746920586, 0.16104158759117126, 0.11095988005399704, 0.04788126423954964, 0.012279761023819447, 0.028560696169734, 0.027349110692739487, -0.020124202594161034, 0.024905337020754814, 0.05122065171599388, 0.1566726416349411, -0.06552774459123611, 0.05533428490161896, -0.015626374632120132, -0.01059025339782238, -0.016780326142907143, 0.08299578726291656, -0.041688546538352966, 0.020815463736653328, -0.05084379017353058, 0.10402560979127884, -0.03202259540557861, -0.27745217084884644, -0.024940812960267067, -0.10787259042263031, -0.12300571799278259, -0.01886696368455887, 0.031194305047392845, -0.028533557429909706, 0.044520001858472824, 0.043779484927654266, -0.023326076567173004, 0.1918414980173111, 0.00918071810156107, -0.08147234469652176, -0.061812665313482285, 0.06010347977280617, -0.026834601536393166, 0.25237104296684265, -0.00911113154143095, 0.05889677256345749, 0.10442125052213669, -0.01972048170864582, -0.15205006301403046, 0.014306074008345604, 0.10304729640483856, -0.02480844035744667, 0.056076932698488235, 0.1678272932767868, 
-0.03224613517522812, 0.1346680223941803, 0.04937608167529106, -0.0504123754799366, 0.04668944329023361, 0.0651116892695427, 0.053428635001182556, -0.10425859689712524, 0.07871074229478836, -0.08835893124341965, 0.1469535082578659, 0.11865463107824326, -0.03719433769583702, -0.005621468648314476, -0.06722493469715118, 0.06545529514551163, -0.028649011626839638, 0.11482051759958267, -0.006411688402295113, -0.17031551897525787, 0.034819286316633224, 0.029819030314683914, 0.048960573971271515, -0.22660869359970093, -0.0698607787489891, 0.14183257520198822, -0.03913366422057152, 0.0014385692775249481, 0.08805697411298752, 0.04462926462292671, 0.008113093674182892, -0.07228869944810867, -0.0881652683019638, -0.007042572367936373, 0.12505950033664703, -0.09905392676591873, -0.045729272067546844 ]
8451eaf3fa1b6983d9b0c349476c9bf4c9d5f375
The set contains 2,391,206 pragraphs of prompt/result combinations, where the prompt is a paragraph from Dutch Wikipedia and the result is a simplified text, which could include more than one paragraph. This dataset was created by UWV, as a part of project "Leesplank", an effort to generate datasets that are ethically and legally sound. The basis of this dataset was the wikipedia extract as a part of Gigacorpus. The lines were fed one by one into GPT 4 1116 preview, where the filtering level was the strictest. This means that this dataset is cleaned by the [OpenAI filtering standards](https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/content-filter). The prompt used was: "Simplify a Dutch paragraph directly into a single, clear, and engaging text suitable for adult readers that speak Dutch as a second language, using words from the 'basiswoordenlijst Amsterdamse kleuters.' Maintain direct quotes, simplify dialogue, explain cultural references, idioms, and technical terms naturally within the text. Adjust the order of information for improved simplicity, engagement, and readability. Attempt to not use any commas or diminutives." The prompt was used as a system prompt. Hyperparameters were: Temperature 0.5, top_P: 0.2 After generation, the resulting prompt/result combinations were filtered where items that had a ratio prompt/result length greater than 4 or smaller than 0.25 were deemed "bad" and are not included in the set. These 2.391,206 paragraphs are not yet all of wikipedia; there are about 1,000,000 paragraphs left to process. Another round of processing is required to filter out items that originated as map descriptions.
UWV/Leesplank_NL_wikipedia_simplifications
[ "task_categories:text-generation", "task_categories:text-classification", "task_categories:translation", "task_categories:text2text-generation", "size_categories:1M<n<10M", "language:nl", "license:cc-by-nc-4.0", "region:us" ]
2024-01-25T12:28:54+00:00
{"language": ["nl"], "license": "cc-by-nc-4.0", "size_categories": ["1M<n<10M"], "task_categories": ["text-generation", "text-classification", "translation", "text2text-generation"], "pretty_name": "Leesplank wikipedia vereenvoudigingen"}
2024-02-14T14:11:11+00:00
[]
[ "nl" ]
TAGS #task_categories-text-generation #task_categories-text-classification #task_categories-translation #task_categories-text2text-generation #size_categories-1M<n<10M #language-Dutch #license-cc-by-nc-4.0 #region-us
The set contains 2,391,206 pragraphs of prompt/result combinations, where the prompt is a paragraph from Dutch Wikipedia and the result is a simplified text, which could include more than one paragraph. This dataset was created by UWV, as a part of project "Leesplank", an effort to generate datasets that are ethically and legally sound. The basis of this dataset was the wikipedia extract as a part of Gigacorpus. The lines were fed one by one into GPT 4 1116 preview, where the filtering level was the strictest. This means that this dataset is cleaned by the OpenAI filtering standards. The prompt used was: "Simplify a Dutch paragraph directly into a single, clear, and engaging text suitable for adult readers that speak Dutch as a second language, using words from the 'basiswoordenlijst Amsterdamse kleuters.' Maintain direct quotes, simplify dialogue, explain cultural references, idioms, and technical terms naturally within the text. Adjust the order of information for improved simplicity, engagement, and readability. Attempt to not use any commas or diminutives." The prompt was used as a system prompt. Hyperparameters were: Temperature 0.5, top_P: 0.2 After generation, the resulting prompt/result combinations were filtered where items that had a ratio prompt/result length greater than 4 or smaller than 0.25 were deemed "bad" and are not included in the set. These 2.391,206 paragraphs are not yet all of wikipedia; there are about 1,000,000 paragraphs left to process. Another round of processing is required to filter out items that originated as map descriptions.
[]
[ "TAGS\n#task_categories-text-generation #task_categories-text-classification #task_categories-translation #task_categories-text2text-generation #size_categories-1M<n<10M #language-Dutch #license-cc-by-nc-4.0 #region-us \n" ]
[ 79 ]
[ "passage: TAGS\n#task_categories-text-generation #task_categories-text-classification #task_categories-translation #task_categories-text2text-generation #size_categories-1M<n<10M #language-Dutch #license-cc-by-nc-4.0 #region-us \n" ]
[ 0.012097056955099106, -0.07132735103368759, -0.004159265197813511, 0.01959564909338951, 0.01534702442586422, 0.056597135961055756, 0.192082479596138, 0.05767412111163139, 0.187757208943367, 0.024266673251986504, 0.07837484031915665, 0.02472662553191185, 0.04598816856741905, 0.010000248439610004, -0.04046238958835602, -0.2616019546985626, 0.08363252878189087, -0.07738186419010162, -0.044575516134500504, 0.07231008261442184, 0.11763758957386017, -0.03999115899205208, 0.05358338728547096, -0.07850705087184906, -0.0453304722905159, 0.027029525488615036, -0.0004680564161390066, -0.06683801859617233, 0.08262176066637039, 0.023269057273864746, 0.10500439256429672, 0.08712504059076309, -0.004233958199620247, -0.22356626391410828, -0.017584163695573807, -0.050351016223430634, -0.06594009697437286, 0.020136306062340736, 0.09391670674085617, 0.041911445558071136, 0.1275128275156021, -0.12132133543491364, -0.04280288890004158, 0.07585309445858002, -0.13978669047355652, -0.02900145947933197, -0.0613783597946167, 0.05968502163887024, 0.06104714050889015, 0.03040672279894352, -0.016215071082115173, 0.07884443551301956, -0.19460059702396393, 0.024694358929991722, 0.1010613813996315, -0.3110456168651581, -0.007201688829809427, 0.2504134178161621, 0.1312115490436554, 0.1526324599981308, -0.07922857254743576, 0.049263063818216324, 0.023786839097738266, 0.009632039815187454, -0.04963788017630577, -0.1500508338212967, -0.06363604217767715, 0.027549371123313904, -0.09620683640241623, -0.07192912697792053, 0.25261712074279785, 0.05698809027671814, 0.04095970094203949, -0.12580376863479614, -0.01964743435382843, -0.00007888099935371429, -0.05162815749645233, 0.047760505229234695, -0.0049569676630198956, 0.0314704068005085, 0.16181114315986633, -0.02678733505308628, -0.08460839837789536, -0.07293572276830673, -0.13599705696105957, 0.027603352442383766, -0.035480573773384094, -0.01999170146882534, -0.016289271414279938, 0.03170664608478546, -0.11601188033819199, -0.0568394660949707, 
-0.03143365681171417, -0.047588855028152466, -0.07616209983825684, -0.004545476753264666, 0.08959608525037766, 0.019875774160027504, 0.1095348373055458, 0.021621035411953926, -0.05073509365320206, 0.018941430374979973, -0.17124509811401367, 0.1638147234916687, 0.03879954665899277, 0.09524580836296082, -0.0668862834572792, -0.06066169962286949, -0.03841274976730347, -0.08826269209384918, 0.00416795676574111, 0.006983133032917976, -0.1798412799835205, -0.07890740036964417, -0.0816044807434082, 0.1887720823287964, 0.021442515775561333, 0.03523416072130203, -0.03759371116757393, 0.0009021472069434822, 0.22164030373096466, -0.037206899374723434, -0.017100345343351364, 0.021412737667560577, -0.014215618371963501, -0.009206102229654789, -0.11814959347248077, 0.045800477266311646, -0.0682300552725792, 0.08248639851808548, -0.07821714133024216, -0.026835886761546135, -0.036021504551172256, -0.1634204238653183, 0.12453947961330414, -0.04298582300543785, -0.060410384088754654, -0.040427692234516144, -0.20063377916812897, 0.016377922147512436, -0.049408894032239914, -0.07382562756538391, 0.027172474190592766, -0.07064845412969589, -0.05835649371147156, 0.012773187831044197, -0.04172029718756676, 0.060339733958244324, -0.08350352942943573, 0.026131244376301765, -0.14451541006565094, 0.05507389456033707, -0.1625654101371765, -0.014950810000300407, -0.10132814943790436, 0.06864526122808456, -0.23930084705352783, 0.050815336406230927, -0.1965617835521698, 0.08160489797592163, -0.1308901011943817, -0.06199610233306885, -0.014367503114044666, 0.01506453100591898, -0.13813267648220062, 0.15259599685668945, -0.13745735585689545, -0.049709126353263855, 0.12380094081163406, -0.060963332653045654, -0.08685898035764694, 0.050671715289354324, 0.05485479533672333, 0.09828167408704758, 0.02549852803349495, 0.32329893112182617, 0.00914942380040884, -0.009969855658710003, 0.029482776299118996, 0.1577359437942505, 0.018481194972991943, -0.07405653595924377, 0.14212295413017273, 
-0.10636530816555023, 0.009569399058818817, -0.010433242656290531, -0.039650704711675644, 0.0067470865324139595, -0.01895560324192047, -0.0345713272690773, 0.0317455530166626, 0.04861801117658615, 0.13386307656764984, -0.00482623977586627, 0.0656481459736824, -0.05170173943042755, -0.006300598848611116, -0.05233585834503174, -0.004477519541978836, 0.0034640359226614237, 0.007959156297147274, -0.038432102650403976, -0.03303571045398712, 0.02375076152384281, 0.028172800317406654, -0.08153142780065536, -0.11399298906326294, 0.07797355204820633, 0.027781570330262184, 0.09215250611305237, 0.16516779363155365, 0.00603393604978919, -0.0455353818833828, -0.06930337101221085, 0.06322968006134033, -0.023467430844902992, -0.011712683364748955, -0.026266679167747498, -0.15682943165302277, 0.038091160356998444, 0.01741933263838291, 0.0024813958443701267, -0.12243825942277908, -0.011366440914571285, 0.20714187622070312, 0.013754847459495068, 0.0515001080930233, 0.09314978122711182, 0.008236558176577091, 0.08906368166208267, -0.013658123090863228, -0.0019090441055595875, 0.052973609417676926, -0.085702084004879, -0.07792768627405167, 0.1981525868177414, -0.08087050169706345, 0.0006798285176046193, 0.11271470785140991, -0.045279596000909805, 0.04818657040596008, -0.032398924231529236, -0.024164067581295967, -0.017472822219133377, 0.0024095228873193264, -0.12697972357273102, -0.010529076680541039, 0.0533294603228569, 0.07027135044336319, -0.09446372091770172, -0.04375612735748291, 0.004167521838098764, 0.015291634947061539, -0.016724951565265656, 0.12291636317968369, 0.11816626042127609, -0.1691366732120514, 0.15579736232757568, 0.23483890295028687, 0.09429418295621872, 0.19365087151527405, -0.015599751845002174, -0.010062247514724731, -0.013778390362858772, 0.07962971180677414, -0.005122312810271978, 0.10970082134008408, -0.20175345242023468, 0.04111788794398308, 0.10522785037755966, 0.13743047416210175, 0.06709345430135727, -0.09692643582820892, -0.058720167726278305, 
-0.07344038784503937, -0.09961730241775513, -0.09541161358356476, 0.045804399996995926, 0.04907070845365524, 0.07748500257730484, 0.0036332837771624327, -0.08404969424009323, 0.05653996020555496, -0.07256636023521423, -0.08935260027647018, 0.1575227677822113, -0.14882266521453857, -0.31995558738708496, -0.2213517129421234, 0.04961289092898369, -0.10255733132362366, 0.014082624576985836, 0.029846947640180588, -0.031546324491500854, -0.08606293797492981, -0.06910806894302368, 0.1721201092004776, -0.09500976651906967, -0.09869936108589172, -0.09936603903770447, 0.10911361128091812, -0.1447647511959076, -0.09958373755216599, -0.04327008128166199, 0.01927407830953598, -0.06003246456384659, 0.07697761803865433, -0.1483701765537262, 0.08042449504137039, 0.23355858027935028, 0.03203549608588219, -0.0093962661921978, -0.12327192723751068, 0.11388009041547775, -0.13835151493549347, 0.07326681166887283, 0.06633424013853073, 0.043471962213516235, 0.004767526872456074, 0.2089281678199768, 0.04161808639764786, -0.059323303401470184, -0.009448442608118057, 0.02307339385151863, -0.06363330036401749, -0.31328320503234863, -0.17424198985099792, -0.15273353457450867, 0.1845065951347351, 0.042451098561286926, 0.057058077305555344, 0.15432541072368622, -0.015314570628106594, -0.009697937406599522, 0.10466350615024567, 0.07081303745508194, 0.0629473552107811, 0.24501465260982513, -0.01129066664725542, 0.040921665728092194, -0.1041087880730629, -0.027707956731319427, 0.10793107002973557, 0.14424574375152588, 0.18511971831321716, 0.23105210065841675, 0.32590097188949585, 0.05127723142504692, 0.039909735321998596, 0.09000739455223083, 0.10675536841154099, 0.057422373443841934, -0.01247598510235548, -0.06532642990350723, -0.09550898522138596, 0.09186950325965881, 0.024927912279963493, 0.0464167520403862, -0.18131084740161896, -0.02134247124195099, -0.14893123507499695, 0.11168340593576431, 0.09639590978622437, 0.08596383035182953, -0.03355680778622627, 0.08387576788663864, 
0.11165755987167358, -0.015240851789712906, -0.08035191893577576, 0.09744858741760254, 0.16434817016124725, -0.1450449377298355, 0.046432871371507645, -0.016841137781739235, 0.14505013823509216, -0.04417940229177475, 0.05972598120570183, -0.11942169815301895, -0.18283309042453766, 0.002916737226769328, 0.14657799899578094, -0.1485804170370102, 0.3223091959953308, 0.06532570719718933, -0.04940354824066162, -0.13465215265750885, -0.05948464944958687, -0.05381030961871147, 0.18369413912296295, 0.19823795557022095, 0.0529865063726902, -0.06398489326238632, -0.048215821385383606, -0.09094268083572388, -0.024681245908141136, 0.10607211291790009, -0.10463830828666687, -0.03702445328235626, 0.019296769052743912, 0.012660705484449863, -0.007789785508066416, 0.032266367226839066, -0.07030439376831055, -0.06993932276964188, 0.06154954805970192, 0.1424752026796341, 0.00036580697633326054, -0.04017988592386246, 0.03737806901335716, -0.028678884729743004, 0.18703202903270721, -0.33873501420021057, -0.04296489804983139, -0.0958271399140358, -0.1377226859331131, 0.1301172971725464, -0.022108765318989754, -0.03825872018933296, -0.06343496590852737, 0.022743528708815575, -0.09586664289236069, -0.03891922906041145, 0.18357281386852264, -0.031266022473573685, -0.07891139388084412, -0.052885860204696655, 0.14558058977127075, -0.12487644702196121, 0.09178421646356583, -0.014598388224840164, 0.03020431101322174, 0.056572746485471725, -0.16951237618923187, -0.031991731375455856, -0.08026263117790222, -0.039400454610586166, 0.13787850737571716, -0.09253229945898056, 0.04631485417485237, 0.01397579163312912, -0.06477680802345276, 0.24362263083457947, 0.27722519636154175, -0.05690150707960129, 0.12328414618968964, 0.16970014572143555, -0.08620093017816544, -0.3748854398727417, 0.013136129826307297, -0.10148794949054718, -0.027915243059396744, -0.0027774139307439327, -0.13116693496704102, -0.011825992725789547, 0.1556447446346283, -0.04266556352376938, 0.12269874662160873, 
-0.36802858114242554, -0.11791317909955978, -0.009094198234379292, -0.09575920552015305, 0.1096174418926239, -0.10964705795049667, -0.10794330388307571, -0.09012557566165924, -0.15643726289272308, 0.12529931962490082, -0.11408163607120514, 0.07819552719593048, 0.003447560593485832, 0.054886169731616974, -0.017706196755170822, -0.023541823029518127, 0.20004154741764069, 0.11660660803318024, 0.05587412789463997, -0.023310983553528786, -0.1421152949333191, 0.30745336413383484, 0.030191266909241676, 0.02010127156972885, -0.10093133896589279, -0.04126487299799919, -0.15508614480495453, -0.023323511704802513, -0.09314630925655365, 0.07102583348751068, -0.06007824093103409, -0.09069982916116714, -0.0922221690416336, 0.06403818726539612, -0.0594470351934433, 0.04859701916575432, 0.35395878553390503, -0.08592747151851654, -0.035843782126903534, 0.1649082899093628, 0.12728549540042877, -0.04901266098022461, 0.005731277633458376, -0.002604949288070202, -0.02195274457335472, 0.06531605124473572, -0.1559484750032425, 0.029458865523338318, 0.15500123798847198, -0.03589069843292236, 0.1303982436656952, 0.03846835717558861, -0.01959948055446148, -0.012701357714831829, 0.13559110462665558, -0.0331341028213501, -0.2332531064748764, -0.0006378001999109983, 0.0048199123702943325, 0.03737741708755493, -0.05156289413571358, 0.1022803857922554, 0.023133253678679466, 0.05547134578227997, 0.052873048931360245, 0.002647790824994445, -0.08202579617500305, 0.061390820890665054, -0.024069689214229584, 0.10359929502010345, -0.11175990849733353, 0.14681889116764069, 0.1638961285352707, -0.09685596823692322, -0.09438346326351166, 0.18636898696422577, -0.10676403343677521, -0.08691688627004623, -0.059697914868593216, 0.07957590371370316, -0.04472861438989639, -0.05949448421597481, -0.0332690067589283, -0.14054548740386963, -0.0031684497371315956, 0.11321555823087692, -0.008648592978715897, 0.06776829808950424, 0.02949959971010685, -0.07774098962545395, 0.06234249472618103, 0.010404792614281178, 
-0.1756521612405777, 0.031651902943849564, -0.03649694100022316, 0.004913876298815012, -0.022020822390913963, 0.06490518152713776, -0.035393670201301575, -0.008311583660542965, -0.13764403760433197, 0.019758835434913635, -0.10233578085899353, 0.045224376022815704, -0.06457815319299698, -0.05191624164581299, -0.016396300867199898, 0.01403774879872799, -0.05978141352534294, -0.07331227511167526, -0.13415037095546722, 0.0029368733521550894, -0.05948653817176819, 0.1728852540254593, -0.03592151775956154, -0.03227189555764198, 0.07040416449308395, -0.0076223756186664104, 0.10874606668949127, -0.044694412499666214, -0.027364132925868034, 0.11987003684043884, -0.1106332391500473, 0.04483397305011749, 0.08784313499927521, 0.04879489541053772, 0.07003012299537659, 0.08846745640039444, -0.05647912621498108, 0.014071854762732983, 0.09144216775894165, 0.10124846547842026, -0.21101702749729156, -0.14698021113872528, 0.03928682580590248, -0.09070320427417755, -0.18916232883930206, 0.030034953728318214, 0.003659541020169854, 0.030859481543302536, -0.024976445361971855, 0.07885010540485382, -0.001213315175846219, 0.030648373067378998, -0.048841699957847595, 0.0006716322968713939, -0.014036159962415695, -0.05448317155241966, -0.026060258969664574, -0.08080563694238663, -0.012862762436270714, -0.023031502962112427, 0.26936471462249756, 0.11017617583274841, -0.046028587967157364, 0.0819045901298523, 0.007282725069671869, 0.008741635829210281, 0.046737901866436005, 0.18694336712360382, 0.11072233319282532, -0.025349372997879982, -0.09191720932722092, -0.05549716204404831, -0.036131035536527634, 0.05164574459195137, 0.26422467827796936, 0.16440299153327942, 0.06530734151601791, 0.03357138857245445, 0.11269558221101761, -0.04637230560183525, -0.05904899910092354, 0.08237408101558685, 0.14089947938919067, 0.08173531293869019, -0.06502965092658997, 0.03960781544446945, 0.11954193562269211, -0.1984250396490097, 0.07113957405090332, -0.005717260763049126, -0.0667385682463646, 
-0.08444209396839142, -0.08075084537267685, -0.06364433467388153, -0.13494545221328735, 0.012052392587065697, -0.11878344416618347, 0.06990716606378555, 0.08471659570932388, 0.07933706045150757, -0.005880923010408878, 0.09756068885326385, -0.18045249581336975, -0.07474585622549057, 0.0998571440577507, -0.012258180417120457, 0.0881999284029007, -0.13618811964988708, -0.003101530484855175, -0.0030762243550270796, -0.05189574882388115, -0.016827426850795746, 0.0935816839337349, -0.04619576781988144, -0.093590147793293, -0.1545897126197815, -0.07949669659137726, -0.04840122535824776, 0.06034938246011734, -0.0286797434091568, 0.2072233408689499, 0.04861883446574211, -0.06458208709955215, 0.041563741862773895, 0.10824964195489883, -0.04254484549164772, -0.21859899163246155, -0.011414511129260063, 0.22953839600086212, -0.0018251595320180058, 0.11510444432497025, -0.04371902346611023, -0.07062327861785889, -0.06675238907337189, 0.16309809684753418, 0.2724533677101135, -0.034184642136096954, -0.03978492319583893, -0.04784185439348221, 0.03123537264764309, 0.04201292619109154, 0.12350974977016449, -0.0789266973733902, 0.2627039849758148, -0.031289491802453995, -0.017413128167390823, -0.0025593782775104046, -0.011664830148220062, -0.11943405866622925, 0.05656298249959946, 0.04853581637144089, -0.08906003832817078, -0.10019047558307648, 0.19004268944263458, -0.1428818255662918, 0.05866512656211853, 0.10424859821796417, -0.10765643417835236, -0.0547807514667511, -0.004860009998083115, 0.05367988720536232, -0.019251925870776176, 0.06531175971031189, -0.05512303486466408, -0.10872361809015274, -0.11117342114448547, 0.012749518267810345, -0.1570165902376175, -0.08558934181928635, 0.05870291590690613, 0.013692455366253853, 0.15370848774909973, -0.02688661590218544, 0.15367938578128815, 0.08878146857023239, -0.0327967032790184, -0.07721633464097977, 0.06038253381848335, 0.07860744744539261, 0.0441875159740448, -0.03961842507123947, 0.053767479956150055, -0.02385232411324978, 
0.04145129770040512, 0.056177329272031784, -0.12439454346895218, 0.029893212020397186, 0.1250007301568985, -0.03480043262243271, -0.08001945167779922, -0.03888293728232384, -0.06035890430212021, 0.051379214972257614, 0.0975411906838417, 0.004579645115882158, -0.011113114655017853, -0.03506740182638168, -0.002718238392844796, 0.004376446828246117, 0.02421037293970585, -0.07873491197824478, -0.06652336567640305, -0.033175449818372726, 0.1229068711400032, 0.05753025412559509, -0.2683432996273041, 0.02986232563853264, -0.04016120359301567, 0.133259579539299, -0.07267005741596222, 0.11594793200492859, 0.07992388308048248, -0.09196420758962631, 0.015000131912529469, -0.41351842880249023, 0.05192863568663597, 0.0666571632027626, 0.000676734431181103, -0.06393145024776459 ]
56fe9997400d094be443e30173da467adb86da14
# Dataset Card for "namuwiki_20210301_570125" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
anhdungitvn/namuwiki_20210301_570125
[ "region:us" ]
2024-01-25T12:49:35+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "title", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 6065323543, "num_examples": 866154}], "download_size": 3502422856, "dataset_size": 6065323543}}
2024-01-25T12:54:20+00:00
[]
[]
TAGS #region-us
# Dataset Card for "namuwiki_20210301_570125" More Information needed
[ "# Dataset Card for \"namuwiki_20210301_570125\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"namuwiki_20210301_570125\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"namuwiki_20210301_570125\"\n\nMore Information needed" ]
[ -0.07247255742549896, 0.23016871511936188, -0.0011083789868280292, 0.009738080203533173, 0.08971010893583298, 0.10373147577047348, 0.09933525323867798, 0.06376427412033081, 0.10982812941074371, 0.03786037862300873, 0.18738946318626404, 0.05581260472536087, 0.1473143845796585, 0.15196393430233002, -0.015001785010099411, -0.09636257588863373, 0.0804252028465271, 0.033082541078329086, 0.009792990982532501, 0.06526714563369751, -0.037256110459566116, -0.07073663175106049, 0.10825327783823013, -0.059660349041223526, -0.20524092018604279, 0.08263079077005386, -0.055541038513183594, -0.059265684336423874, 0.04415036737918854, -0.07666496932506561, 0.18533767759799957, -0.018026631325483322, 0.017286241054534912, -0.17434285581111908, -0.0008952461648732424, -0.007446590345352888, -0.05479022115468979, 0.03587839752435684, 0.04726758599281311, -0.01097752247005701, 0.01851125992834568, -0.023206206038594246, -0.021827932447195053, -0.010559581220149994, -0.16635443270206451, -0.16040077805519104, -0.16550426185131073, 0.01358046568930149, 0.03058852255344391, 0.011487110517919064, 0.07257471978664398, 0.17973878979682922, -0.07286696135997772, 0.02672169916331768, 0.08204308152198792, -0.10752684623003006, 0.04226914793252945, 0.18300577998161316, -0.03427164629101753, 0.055031996220350266, -0.005473520141094923, 0.04710313305258751, 0.1187395378947258, -0.0322345532476902, -0.14077799022197723, 0.003621224779635668, -0.09591682255268097, 0.08256924897432327, -0.0004673403163906187, -0.07318870723247528, 0.3382234275341034, 0.07184313237667084, 0.011085673235356808, 0.06163672357797623, -0.0010689502814784646, -0.07133252918720245, 0.017269207164645195, 0.04461216926574707, 0.007525580935180187, 0.007806207984685898, 0.0883854404091835, 0.04735193029046059, -0.09853073954582214, -0.13369634747505188, -0.12307359278202057, 0.06434375047683716, -0.038678884506225586, 0.18151110410690308, -0.21264269948005676, -0.08409349620342255, -0.05424758046865463, 
-0.007216543424874544, -0.02379385195672512, -0.0698360949754715, 0.03055567666888237, -0.019572807475924492, 0.03437809646129608, -0.05193374305963516, 0.08911203593015671, -0.018294407054781914, -0.009025509469211102, 0.08903925120830536, -0.05399390310049057, 0.059307925403118134, 0.17036166787147522, -0.06408749520778656, -0.05133529007434845, 0.025522343814373016, -0.07567302882671356, -0.15309354662895203, 0.015104113146662712, -0.10371508449316025, -0.0710144117474556, -0.01645383983850479, -0.12170334905385971, 0.08523426949977875, -0.0571465864777565, -0.105267234146595, -0.04505719244480133, -0.04759027063846588, 0.23766231536865234, -0.061669476330280304, -0.004003318026661873, 0.023832956328988075, -0.08386210352182388, 0.07966551184654236, -0.09617620706558228, -0.012305371463298798, 0.020977845415472984, 0.08885152637958527, -0.11308719962835312, -0.005142975598573685, -0.023514796048402786, -0.0004154732159804553, 0.12782692909240723, -0.1901581734418869, 0.07848765701055527, -0.07174704968929291, -0.2588734030723572, 0.02408180944621563, 0.03750305622816086, -0.04400668293237686, 0.1688186526298523, 0.010028630495071411, 0.09376078099012375, 0.017101000994443893, -0.01985582336783409, 0.055707354098558426, -0.07178717106580734, 0.014508259482681751, -0.019360993057489395, 0.06680130958557129, -0.2084725797176361, 0.005478951148688793, -0.09044499695301056, 0.0617167204618454, 0.12268142402172089, -0.0756044015288353, -0.13938316702842712, 0.05399437993764877, -0.08352188766002655, 0.012910732999444008, -0.12305179238319397, 0.004097626078873873, 0.029916778206825256, 0.08217601478099823, -0.14492328464984894, -0.019854260608553886, 0.2486715018749237, -0.16066934168338776, -0.1756870150566101, -0.016611885279417038, 0.016717735677957535, -0.023747026920318604, -0.010921189561486244, 0.31731635332107544, -0.03921890631318092, -0.10116451978683472, -0.04715820401906967, 0.1040307804942131, -0.1840202957391739, -0.321025550365448, 0.06655795872211456, 
-0.03334074467420578, -0.13665801286697388, 0.02509937807917595, 0.20400480926036835, 0.015300306491553783, -0.04176091030240059, -0.0844656452536583, -0.0037036400754004717, -0.15003906190395355, 0.028926121070981026, 0.01751391403377056, 0.04369216784834862, -0.017193887382745743, 0.14211586117744446, 0.031524598598480225, 0.11536700278520584, 0.028242604807019234, -0.03926839679479599, -0.053420957177877426, 0.029780123382806778, -0.13169151544570923, -0.03078961744904518, -0.07526328414678574, -0.08289501816034317, 0.007138154469430447, -0.07376570254564285, -0.010151528753340244, -0.10290566831827164, 0.10746043920516968, -0.023180635645985603, 0.04103108495473862, 0.04355791211128235, 0.08758292347192764, 0.08439881354570389, 0.055688947439193726, 0.04999123141169548, 0.016410203650593758, -0.07216950505971909, -0.10260557383298874, -0.10231896489858627, -0.07657559961080551, 0.0830223336815834, 0.05581926554441452, 0.03605949878692627, -0.0519733652472496, 0.1037122830748558, 0.04725869372487068, 0.04609035700559616, -0.040166739374399185, 0.025233980268239975, -0.05332968384027481, -0.022174473851919174, 0.030340924859046936, -0.0397595576941967, 0.17568235099315643, 0.12014294415712357, -0.023029472678899765, 0.029315946623682976, 0.006710969842970371, 0.05400726944208145, -0.03752950578927994, -0.010366412810981274, 0.09810537099838257, -0.23503202199935913, -0.051015548408031464, 0.030958721414208412, -0.06558006256818771, 0.09411684423685074, 0.07593900710344315, -0.04103953763842583, -0.17682583630084991, 0.0629652664065361, 0.2057526856660843, -0.3096654713153839, 0.16638536751270294, 0.18934285640716553, 0.11433184891939163, 0.19096344709396362, -0.06229516491293907, -0.1136697456240654, -0.012318483553826809, -0.04997965693473816, -0.057946234941482544, 0.1696130931377411, -0.05116114765405655, 0.012007780373096466, 0.07540128380060196, 0.03902032598853111, 0.09845244139432907, -0.09318951517343521, -0.1525498330593109, -0.011036676354706287, 
0.022440511733293533, -0.18711890280246735, 0.04464692249894142, -0.028621768578886986, 0.053246092051267624, 0.05731385201215744, 0.058842819184064865, 0.11502310633659363, 0.02433175779879093, -0.00878961943089962, 0.1202944964170456, -0.16508112847805023, -0.2789252698421478, -0.06972579658031464, -0.1347610056400299, 0.02942640893161297, -0.034400857985019684, -0.0703977718949318, -0.22049672901630402, -0.03585587069392204, 0.03223947435617447, -0.12478611618280411, -0.21381253004074097, -0.04381253570318222, 0.1061365082859993, 0.015972180292010307, -0.10352499783039093, -0.07233436405658722, 0.006445309147238731, -0.06105543300509453, 0.12508364021778107, 0.1044430285692215, -0.13771213591098785, 0.14409713447093964, 0.08362650126218796, -0.04771920666098595, 0.12596745789051056, 0.02818811871111393, 0.07968396693468094, -0.03950093314051628, -0.04889018088579178, 0.11739655584096909, 0.0008795122266747057, 0.03726730868220329, 0.006156152579933405, 0.08011080324649811, -0.14736385643482208, -0.024635078385472298, -0.012406141497194767, -0.1799086481332779, -0.264037162065506, -0.12399229407310486, -0.0432557612657547, 0.16799668967723846, 0.08919835090637207, 0.05220908671617508, -0.008690875954926014, 0.10833127796649933, 0.16718809306621552, -0.056776177138090134, -0.19605986773967743, -0.026514926925301552, -0.10256695747375488, 0.02502955123782158, -0.046883244067430496, -0.1659674197435379, 0.060286860913038254, 0.10613960027694702, 0.22486120462417603, 0.14938975870609283, 0.17357295751571655, 0.11866287887096405, 0.023510240018367767, 0.17340929806232452, 0.11192844063043594, 0.1498761922121048, 0.09633567184209824, -0.06587902456521988, 0.048575595021247864, 0.0061083282344043255, -0.05601772293448448, 0.004608524031937122, 0.04677005112171173, -0.133163183927536, 0.08943290263414383, -0.1054970845580101, -0.04686204344034195, -0.14739172160625458, 0.12052620947360992, -0.21500150859355927, 0.06501180678606033, -0.03218727558851242, 
0.15385495126247406, -0.09513809531927109, 0.08967330306768417, 0.024086132645606995, -0.04632923752069473, 0.11909674108028412, 0.045867010951042175, 0.03267645090818405, -0.037385378032922745, -0.05079103633761406, -0.07958072423934937, -0.0937938243150711, -0.030355127528309822, 0.09326530247926712, -0.08276787400245667, 0.2734430134296417, 0.037808604538440704, -0.01721358671784401, -0.12080113589763641, -0.07729572802782059, -0.02912185713648796, -0.0034273939672857523, 0.1432516723871231, 0.06573221832513809, -0.20005443692207336, -0.18123358488082886, -0.07763548195362091, -0.03285462409257889, 0.13068608939647675, 0.1465398520231247, -0.15122859179973602, 0.07327013462781906, 0.04106232523918152, -0.07495072484016418, -0.08431535214185715, -0.0866752490401268, -0.10889425873756409, 0.01065222080796957, 0.08001184463500977, -0.10833168029785156, 0.055997516959905624, 0.03802907466888428, -0.07073633372783661, 0.05973808094859123, 0.10418969392776489, 0.003217108314856887, -0.09581997990608215, -0.06158829852938652, 0.13175508379936218, -0.02063283510506153, -0.01361953467130661, -0.0198214128613472, -0.07419425249099731, -0.04328758642077446, -0.1963847577571869, 0.08723793923854828, -0.05195152387022972, 0.09280292689800262, -0.061811454594135284, 0.04613490402698517, 0.01692165434360504, -0.038851384073495865, -0.024310054257512093, 0.040766213089227676, -0.004027839750051498, -0.060973767191171646, 0.2010170966386795, -0.06722207367420197, 0.10836490988731384, 0.1563108116388321, 0.15375541150569916, 0.04941520467400551, 0.07121197134256363, -0.10291625559329987, 0.1664150506258011, 0.08948787301778793, -0.03489124774932861, 0.18550635874271393, 0.20415684580802917, -0.021502772346138954, -0.2124699354171753, -0.02630574442446232, -0.156177818775177, -0.05696399137377739, 0.0035012667067348957, -0.2377265840768814, 0.14376646280288696, 0.18965177237987518, -0.030597791075706482, 0.2666422426700592, -0.13335373997688293, 0.020266283303499222, 
0.14756956696510315, 0.09204021841287613, 0.3581138253211975, -0.10791496932506561, -0.05042316019535065, -0.0711798369884491, -0.2537575364112854, 0.31368961930274963, -0.04387982562184334, 0.018424972891807556, -0.05750078707933426, 0.13204500079154968, 0.0019009840907528996, -0.07509808987379074, 0.17148208618164062, -0.016893258318305016, 0.07616427540779114, -0.09312371909618378, -0.09558482468128204, 0.10881480574607849, -0.0544913187623024, 0.06662692129611969, 0.018381943926215172, 0.019152699038386345, -0.1435430645942688, -0.013285131193697453, 0.0003870391519740224, 0.005658127833157778, 0.0471145361661911, -0.05705859884619713, -0.08142281323671341, 0.023453472182154655, -0.09710251539945602, -0.01083743292838335, 0.1858501136302948, 0.04580124467611313, 0.003389903577044606, 0.0851074829697609, -0.014250349253416061, -0.05764241889119148, 0.09353414922952652, -0.0856340304017067, -0.11146751046180725, 0.11728863418102264, -0.25704070925712585, 0.0173620767891407, 0.09414021670818329, 0.050684329122304916, 0.019012005999684334, 0.018676018342375755, -0.08054579794406891, 0.025491492822766304, 0.11517456918954849, -0.08080951869487762, -0.04040833190083504, 0.08309153467416763, -0.006957134697586298, 0.08561550825834274, 0.08969678729772568, -0.0022645967546850443, 0.023365888744592667, 0.00029874398023821414, -0.049307629466056824, 0.06717459112405777, -0.09077621251344681, 0.03984828293323517, 0.09214663505554199, 0.03209011256694794, -0.16235674917697906, 0.258952260017395, 0.034654367715120316, -0.10195207595825195, 0.07994118332862854, 0.038649607449769974, -0.07340461015701294, -0.05846235901117325, -0.07313457131385803, 0.16900335252285004, -0.06221849471330643, -0.20777973532676697, 0.08805539458990097, 0.003981216810643673, 0.024929415434598923, 0.14376193284988403, -0.0015906422631815076, 0.07830578088760376, 0.03495543822646141, -0.03767922520637512, -0.09448401629924774, -0.13135993480682373, -0.09207891672849655, 0.017788998782634735, 
-0.13504615426063538, -0.23348906636238098, -0.01129455491900444, 0.1636413186788559, -0.03625553473830223, -0.06800515949726105, -0.11434200406074524, 0.07583102583885193, -0.08873968571424484, 0.05039218068122864, -0.0798664465546608, 0.010705706663429737, -0.004042556043714285, 0.014274735935032368, -0.05186237767338753, -0.0025408538058400154, -0.10731291025876999, 0.09910931438207626, 0.06771436333656311, -0.011525374837219715, -0.06030033528804779, 0.006585138384252787, 0.08609241992235184, 0.07169385254383087, 0.030968718230724335, 0.12322210520505905, 0.03295724466443062, 0.11898081004619598, -0.07370460778474808, -0.038998525589704514, 0.031556952744722366, 0.03248884528875351, 0.05636875703930855, 0.1282327026128769, -0.03857129439711571, 0.058532506227493286, -0.06623337417840958, 0.040534090250730515, -0.006017746403813362, -0.06484747678041458, -0.023457830771803856, -0.10585218667984009, -0.11832807958126068, -0.02254651114344597, -0.02694358117878437, 0.21522435545921326, -0.04791795462369919, -0.03714020177721977, 0.0951102003455162, 0.009710174985229969, 0.02180502936244011, -0.04508834704756737, -0.006570239085704088, -0.09447979182004929, -0.041414130479097366, 0.05649183690547943, 0.007910644635558128, -0.08306010812520981, 0.37763363122940063, -0.09424843639135361, -0.20620645582675934, -0.053790461272001266, 0.07151322811841965, -0.17235995829105377, -0.020158758386969566, 0.36286935210227966, 0.06765829026699066, -0.06753015518188477, -0.012427624315023422, 0.05374810844659805, -0.061323683708906174, 0.16328950226306915, 0.06329507380723953, 0.12493623048067093, -0.012704743072390556, -0.008626779541373253, 0.0177596602588892, -0.06315496563911438, -0.011073167435824871, -0.016498902812600136, -0.10190271586179733, 0.027764320373535156, 0.10591556131839752, -0.06034650281071663, 0.09312956035137177, 0.03449256345629692, -0.03567246347665787, -0.04087568819522858, -0.0930539071559906, -0.09626173973083496, -0.19693531095981598, 
-0.05568389967083931, -0.073244608938694, 0.1024383082985878, -0.015926966443657875, 0.03045092523097992, 0.2639475166797638, 0.09408597648143768, 0.01843397319316864, 0.060913532972335815, 0.03888954222202301, 0.08263403177261353, -0.017951980233192444, -0.02034865692257881, -0.10964811593294144, 0.007616820279508829, -0.06896226108074188, 0.05203648656606674, -0.10577844828367233, -0.017140528187155724, -0.024453362450003624, 0.026461070403456688, 0.0230912696570158, -0.07929208129644394, -0.05430987849831581, -0.05116773396730423, 0.07993809133768082, -0.0640728771686554, 0.03278381749987602, 0.005719365552067757, 0.05835964158177376, 0.021025076508522034, 0.02433103509247303, 0.0415261946618557, 0.09040150046348572, -0.015878837555646896, 0.003913913853466511, -0.05770844966173172, 0.07164818048477173, -0.014179213903844357, -0.036632660776376724, -0.05038048326969147, 0.16087990999221802, 0.16005174815654755, 0.04473734274506569, -0.01648184470832348, 0.01428704522550106, 0.047786299139261246, -0.04178542643785477, 0.1618136465549469, -0.012378028593957424, -0.026326829567551613, -0.011953459121286869, -0.08807455748319626, -0.04570622742176056, -0.08870829641819, -0.028687182813882828, 0.027503320947289467, 0.08647578209638596, -0.06984948366880417, -0.12443814426660538, 0.12437142431735992, -0.22866839170455933, 0.16178293526172638, 0.1808243691921234, -0.12780772149562836, -0.1046852394938469, -0.01780485361814499, 0.04675421491265297, 0.020249882712960243, -0.00781654380261898, -0.0898529663681984, -0.03747231885790825, -0.13740113377571106, 0.05039454996585846, -0.35062316060066223, -0.1726779043674469, -0.03383565694093704, 0.028290515765547752, 0.16545546054840088, 0.010517221875488758, 0.0887390598654747, 0.027455007657408714, 0.0501173734664917, -0.10035386681556702, 0.06674707680940628, 0.0084642693400383, 0.09049084782600403, -0.08259133994579315, 0.011065359227359295, -0.03030424937605858, -0.05432318150997162, 0.01582561619579792, 
-0.10752793401479721, -0.0888592079281807, 0.2280379831790924, 0.06655687838792801, 0.023469334468245506, 0.025842217728495598, -0.07518894225358963, 0.13409848511219025, 0.00596478208899498, -0.0053259748965501785, -0.035955000668764114, -0.01733565516769886, 0.11962655186653137, -0.0032582946587353945, -0.1658000349998474, -0.08819761872291565, 0.006657026242464781, -0.08691643923521042, 0.06143118441104889, 0.0489048957824707, -0.054376062005758286, 0.08811947703361511, -0.09859715402126312, 0.0324028842151165, -0.006894854828715324, 0.023170823231339455, 0.12626227736473083, 0.06106669828295708, -0.027701089158654213, -0.011508766561746597, 0.08112781494855881, 0.011218700557947159, -0.010806415230035782, -0.10317627340555191 ]
334bdb0af2129a6a5db23f6931d56c39c3a7820b
# Dataset Card for Evaluation run of Severus27/BeingWell_llama2_7b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Severus27/BeingWell_llama2_7b](https://huggingface.co/Severus27/BeingWell_llama2_7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Severus27__BeingWell_llama2_7b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T12:58:09.380346](https://huggingface.co/datasets/open-llm-leaderboard/details_Severus27__BeingWell_llama2_7b/blob/main/results_2024-01-25T12-58-09.380346.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.4765868531439815, "acc_stderr": 0.03429398083101181, "acc_norm": 0.481128976846689, "acc_norm_stderr": 0.03505205359893574, "mc1": 0.30599755201958384, "mc1_stderr": 0.016132229728155048, "mc2": 0.4593111895966161, "mc2_stderr": 0.015226872222356481 }, "harness|arc:challenge|25": { "acc": 0.5025597269624573, "acc_stderr": 0.014611199329843788, "acc_norm": 0.5494880546075085, "acc_norm_stderr": 0.014539646098471627 }, "harness|hellaswag|10": { "acc": 0.5879306910973909, "acc_stderr": 0.0049120153691600745, "acc_norm": 0.7827126070503884, "acc_norm_stderr": 0.0041155695522309375 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.04605661864718381, "acc_norm": 0.3, "acc_norm_stderr": 0.04605661864718381 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.42962962962962964, "acc_stderr": 0.04276349494376599, "acc_norm": 0.42962962962962964, "acc_norm_stderr": 0.04276349494376599 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.48026315789473684, "acc_stderr": 0.040657710025626036, "acc_norm": 0.48026315789473684, "acc_norm_stderr": 0.040657710025626036 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5283018867924528, "acc_stderr": 0.030723535249006107, "acc_norm": 0.5283018867924528, "acc_norm_stderr": 0.030723535249006107 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5138888888888888, "acc_stderr": 0.04179596617581, "acc_norm": 0.5138888888888888, "acc_norm_stderr": 0.04179596617581 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, 
"acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3930635838150289, "acc_stderr": 0.03724249595817731, "acc_norm": 0.3930635838150289, "acc_norm_stderr": 0.03724249595817731 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.04220773659171453, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171453 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4085106382978723, "acc_stderr": 0.03213418026701576, "acc_norm": 0.4085106382978723, "acc_norm_stderr": 0.03213418026701576 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.34210526315789475, "acc_stderr": 0.04462917535336936, "acc_norm": 0.34210526315789475, "acc_norm_stderr": 0.04462917535336936 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.47586206896551725, "acc_stderr": 0.0416180850350153, "acc_norm": 0.47586206896551725, "acc_norm_stderr": 0.0416180850350153 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2777777777777778, "acc_stderr": 0.023068188848261128, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.023068188848261128 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.25396825396825395, "acc_stderr": 0.038932596106046734, "acc_norm": 0.25396825396825395, "acc_norm_stderr": 0.038932596106046734 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5451612903225806, "acc_stderr": 0.028327743091561074, "acc_norm": 0.5451612903225806, "acc_norm_stderr": 0.028327743091561074 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3645320197044335, "acc_stderr": 0.033864057460620905, "acc_norm": 0.3645320197044335, "acc_norm_stderr": 0.033864057460620905 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.593939393939394, "acc_stderr": 0.03834816355401181, "acc_norm": 0.593939393939394, "acc_norm_stderr": 0.03834816355401181 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6060606060606061, "acc_stderr": 0.034812853382329624, "acc_norm": 0.6060606060606061, "acc_norm_stderr": 0.034812853382329624 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.6994818652849741, "acc_stderr": 0.0330881859441575, "acc_norm": 0.6994818652849741, "acc_norm_stderr": 0.0330881859441575 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4230769230769231, "acc_stderr": 0.02504919787604234, "acc_norm": 0.4230769230769231, "acc_norm_stderr": 0.02504919787604234 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.24814814814814815, "acc_stderr": 0.026335739404055803, "acc_norm": 0.24814814814814815, "acc_norm_stderr": 0.026335739404055803 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.40756302521008403, "acc_stderr": 0.031918633744784645, "acc_norm": 0.40756302521008403, "acc_norm_stderr": 0.031918633744784645 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.6587155963302752, "acc_stderr": 0.020328612816592446, "acc_norm": 0.6587155963302752, "acc_norm_stderr": 0.020328612816592446 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.2916666666666667, "acc_stderr": 
0.030998666304560524, "acc_norm": 0.2916666666666667, "acc_norm_stderr": 0.030998666304560524 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.6617647058823529, "acc_stderr": 0.03320574612945432, "acc_norm": 0.6617647058823529, "acc_norm_stderr": 0.03320574612945432 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.6455696202531646, "acc_stderr": 0.03113730429718582, "acc_norm": 0.6455696202531646, "acc_norm_stderr": 0.03113730429718582 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5964125560538116, "acc_stderr": 0.03292802819330314, "acc_norm": 0.5964125560538116, "acc_norm_stderr": 0.03292802819330314 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.549618320610687, "acc_stderr": 0.04363643698524779, "acc_norm": 0.549618320610687, "acc_norm_stderr": 0.04363643698524779 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6446280991735537, "acc_stderr": 0.0436923632657398, "acc_norm": 0.6446280991735537, "acc_norm_stderr": 0.0436923632657398 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6018518518518519, "acc_stderr": 0.04732332615978813, "acc_norm": 0.6018518518518519, "acc_norm_stderr": 0.04732332615978813 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5460122699386503, "acc_stderr": 0.0391170190467718, "acc_norm": 0.5460122699386503, "acc_norm_stderr": 0.0391170190467718 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3482142857142857, "acc_stderr": 0.04521829902833586, "acc_norm": 0.3482142857142857, "acc_norm_stderr": 0.04521829902833586 }, "harness|hendrycksTest-management|5": { "acc": 0.6504854368932039, "acc_stderr": 0.04721188506097172, "acc_norm": 0.6504854368932039, "acc_norm_stderr": 0.04721188506097172 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7094017094017094, "acc_stderr": 0.029745048572674078, "acc_norm": 0.7094017094017094, "acc_norm_stderr": 0.029745048572674078 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.52, "acc_stderr": 
0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6615581098339719, "acc_stderr": 0.01692086958621066, "acc_norm": 0.6615581098339719, "acc_norm_stderr": 0.01692086958621066 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.523121387283237, "acc_stderr": 0.026890297881303118, "acc_norm": 0.523121387283237, "acc_norm_stderr": 0.026890297881303118 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2435754189944134, "acc_stderr": 0.014355911964767865, "acc_norm": 0.2435754189944134, "acc_norm_stderr": 0.014355911964767865 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.4934640522875817, "acc_stderr": 0.028627470550556047, "acc_norm": 0.4934640522875817, "acc_norm_stderr": 0.028627470550556047 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5498392282958199, "acc_stderr": 0.02825666072336018, "acc_norm": 0.5498392282958199, "acc_norm_stderr": 0.02825666072336018 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5524691358024691, "acc_stderr": 0.027667138569422704, "acc_norm": 0.5524691358024691, "acc_norm_stderr": 0.027667138569422704 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.3723404255319149, "acc_stderr": 0.028838921471251458, "acc_norm": 0.3723404255319149, "acc_norm_stderr": 0.028838921471251458 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.34028683181225555, "acc_stderr": 0.012101217610223793, "acc_norm": 0.34028683181225555, "acc_norm_stderr": 0.012101217610223793 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.40441176470588236, "acc_stderr": 0.029812630701569736, "acc_norm": 0.40441176470588236, "acc_norm_stderr": 0.029812630701569736 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4542483660130719, "acc_stderr": 0.020142974553795198, "acc_norm": 0.4542483660130719, "acc_norm_stderr": 0.020142974553795198 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5909090909090909, 
"acc_stderr": 0.04709306978661895, "acc_norm": 0.5909090909090909, "acc_norm_stderr": 0.04709306978661895 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5061224489795918, "acc_stderr": 0.03200682020163908, "acc_norm": 0.5061224489795918, "acc_norm_stderr": 0.03200682020163908 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6368159203980099, "acc_stderr": 0.03400598505599015, "acc_norm": 0.6368159203980099, "acc_norm_stderr": 0.03400598505599015 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.67, "acc_stderr": 0.04725815626252609, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252609 }, "harness|hendrycksTest-virology|5": { "acc": 0.43373493975903615, "acc_stderr": 0.038581589406855174, "acc_norm": 0.43373493975903615, "acc_norm_stderr": 0.038581589406855174 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.695906432748538, "acc_stderr": 0.0352821125824523, "acc_norm": 0.695906432748538, "acc_norm_stderr": 0.0352821125824523 }, "harness|truthfulqa:mc|0": { "mc1": 0.30599755201958384, "mc1_stderr": 0.016132229728155048, "mc2": 0.4593111895966161, "mc2_stderr": 0.015226872222356481 }, "harness|winogrande|5": { "acc": 0.7419100236779794, "acc_stderr": 0.012298278833972387 }, "harness|gsm8k|5": { "acc": 0.18498862774829417, "acc_stderr": 0.010695390472237925 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_Severus27__BeingWell_llama2_7b
[ "region:us" ]
2024-01-25T13:00:33+00:00
{"pretty_name": "Evaluation run of Severus27/BeingWell_llama2_7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [Severus27/BeingWell_llama2_7b](https://huggingface.co/Severus27/BeingWell_llama2_7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Severus27__BeingWell_llama2_7b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T12:58:09.380346](https://huggingface.co/datasets/open-llm-leaderboard/details_Severus27__BeingWell_llama2_7b/blob/main/results_2024-01-25T12-58-09.380346.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.4765868531439815,\n \"acc_stderr\": 0.03429398083101181,\n \"acc_norm\": 0.481128976846689,\n \"acc_norm_stderr\": 0.03505205359893574,\n \"mc1\": 0.30599755201958384,\n \"mc1_stderr\": 0.016132229728155048,\n \"mc2\": 0.4593111895966161,\n \"mc2_stderr\": 0.015226872222356481\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5025597269624573,\n \"acc_stderr\": 0.014611199329843788,\n \"acc_norm\": 0.5494880546075085,\n \"acc_norm_stderr\": 0.014539646098471627\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5879306910973909,\n \"acc_stderr\": 0.0049120153691600745,\n \"acc_norm\": 0.7827126070503884,\n \"acc_norm_stderr\": 0.0041155695522309375\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.04605661864718381,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.04605661864718381\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.42962962962962964,\n \"acc_stderr\": 0.04276349494376599,\n \"acc_norm\": 0.42962962962962964,\n \"acc_norm_stderr\": 0.04276349494376599\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.48026315789473684,\n \"acc_stderr\": 0.040657710025626036,\n \"acc_norm\": 0.48026315789473684,\n \"acc_norm_stderr\": 0.040657710025626036\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5283018867924528,\n \"acc_stderr\": 0.030723535249006107,\n \"acc_norm\": 0.5283018867924528,\n \"acc_norm_stderr\": 0.030723535249006107\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5138888888888888,\n \"acc_stderr\": 0.04179596617581,\n \"acc_norm\": 0.5138888888888888,\n \"acc_norm_stderr\": 0.04179596617581\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.29,\n 
\"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3930635838150289,\n \"acc_stderr\": 0.03724249595817731,\n \"acc_norm\": 0.3930635838150289,\n \"acc_norm_stderr\": 0.03724249595817731\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.04220773659171453,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.04220773659171453\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4085106382978723,\n \"acc_stderr\": 0.03213418026701576,\n \"acc_norm\": 0.4085106382978723,\n \"acc_norm_stderr\": 0.03213418026701576\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.34210526315789475,\n \"acc_stderr\": 0.04462917535336936,\n \"acc_norm\": 0.34210526315789475,\n \"acc_norm_stderr\": 0.04462917535336936\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.47586206896551725,\n \"acc_stderr\": 0.0416180850350153,\n \"acc_norm\": 0.47586206896551725,\n \"acc_norm_stderr\": 0.0416180850350153\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.023068188848261128,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.023068188848261128\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.25396825396825395,\n \"acc_stderr\": 
0.038932596106046734,\n \"acc_norm\": 0.25396825396825395,\n \"acc_norm_stderr\": 0.038932596106046734\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5451612903225806,\n \"acc_stderr\": 0.028327743091561074,\n \"acc_norm\": 0.5451612903225806,\n \"acc_norm_stderr\": 0.028327743091561074\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.3645320197044335,\n \"acc_stderr\": 0.033864057460620905,\n \"acc_norm\": 0.3645320197044335,\n \"acc_norm_stderr\": 0.033864057460620905\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.593939393939394,\n \"acc_stderr\": 0.03834816355401181,\n \"acc_norm\": 0.593939393939394,\n \"acc_norm_stderr\": 0.03834816355401181\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.6060606060606061,\n \"acc_stderr\": 0.034812853382329624,\n \"acc_norm\": 0.6060606060606061,\n \"acc_norm_stderr\": 0.034812853382329624\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.6994818652849741,\n \"acc_stderr\": 0.0330881859441575,\n \"acc_norm\": 0.6994818652849741,\n \"acc_norm_stderr\": 0.0330881859441575\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.4230769230769231,\n \"acc_stderr\": 0.02504919787604234,\n \"acc_norm\": 0.4230769230769231,\n \"acc_norm_stderr\": 0.02504919787604234\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.24814814814814815,\n \"acc_stderr\": 0.026335739404055803,\n \"acc_norm\": 0.24814814814814815,\n \"acc_norm_stderr\": 0.026335739404055803\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.40756302521008403,\n \"acc_stderr\": 0.031918633744784645,\n \"acc_norm\": 0.40756302521008403,\n \"acc_norm_stderr\": 0.031918633744784645\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.304635761589404,\n \"acc_stderr\": 0.03757949922943343,\n \"acc_norm\": 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943343\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.6587155963302752,\n \"acc_stderr\": 0.020328612816592446,\n \"acc_norm\": 0.6587155963302752,\n \"acc_norm_stderr\": 0.020328612816592446\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.2916666666666667,\n \"acc_stderr\": 0.030998666304560524,\n \"acc_norm\": 0.2916666666666667,\n \"acc_norm_stderr\": 0.030998666304560524\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.6617647058823529,\n \"acc_stderr\": 0.03320574612945432,\n \"acc_norm\": 0.6617647058823529,\n \"acc_norm_stderr\": 0.03320574612945432\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.6455696202531646,\n \"acc_stderr\": 0.03113730429718582,\n \"acc_norm\": 0.6455696202531646,\n \"acc_norm_stderr\": 0.03113730429718582\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5964125560538116,\n \"acc_stderr\": 0.03292802819330314,\n \"acc_norm\": 0.5964125560538116,\n \"acc_norm_stderr\": 0.03292802819330314\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.549618320610687,\n \"acc_stderr\": 0.04363643698524779,\n \"acc_norm\": 0.549618320610687,\n \"acc_norm_stderr\": 0.04363643698524779\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6446280991735537,\n \"acc_stderr\": 0.0436923632657398,\n \"acc_norm\": 0.6446280991735537,\n \"acc_norm_stderr\": 0.0436923632657398\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6018518518518519,\n \"acc_stderr\": 0.04732332615978813,\n 
\"acc_norm\": 0.6018518518518519,\n \"acc_norm_stderr\": 0.04732332615978813\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.5460122699386503,\n \"acc_stderr\": 0.0391170190467718,\n \"acc_norm\": 0.5460122699386503,\n \"acc_norm_stderr\": 0.0391170190467718\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3482142857142857,\n \"acc_stderr\": 0.04521829902833586,\n \"acc_norm\": 0.3482142857142857,\n \"acc_norm_stderr\": 0.04521829902833586\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6504854368932039,\n \"acc_stderr\": 0.04721188506097172,\n \"acc_norm\": 0.6504854368932039,\n \"acc_norm_stderr\": 0.04721188506097172\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7094017094017094,\n \"acc_stderr\": 0.029745048572674078,\n \"acc_norm\": 0.7094017094017094,\n \"acc_norm_stderr\": 0.029745048572674078\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6615581098339719,\n \"acc_stderr\": 0.01692086958621066,\n \"acc_norm\": 0.6615581098339719,\n \"acc_norm_stderr\": 0.01692086958621066\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.523121387283237,\n \"acc_stderr\": 0.026890297881303118,\n \"acc_norm\": 0.523121387283237,\n \"acc_norm_stderr\": 0.026890297881303118\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2435754189944134,\n \"acc_stderr\": 0.014355911964767865,\n \"acc_norm\": 0.2435754189944134,\n \"acc_norm_stderr\": 0.014355911964767865\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.4934640522875817,\n \"acc_stderr\": 0.028627470550556047,\n \"acc_norm\": 0.4934640522875817,\n \"acc_norm_stderr\": 0.028627470550556047\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5498392282958199,\n \"acc_stderr\": 0.02825666072336018,\n 
\"acc_norm\": 0.5498392282958199,\n \"acc_norm_stderr\": 0.02825666072336018\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5524691358024691,\n \"acc_stderr\": 0.027667138569422704,\n \"acc_norm\": 0.5524691358024691,\n \"acc_norm_stderr\": 0.027667138569422704\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.3723404255319149,\n \"acc_stderr\": 0.028838921471251458,\n \"acc_norm\": 0.3723404255319149,\n \"acc_norm_stderr\": 0.028838921471251458\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.34028683181225555,\n \"acc_stderr\": 0.012101217610223793,\n \"acc_norm\": 0.34028683181225555,\n \"acc_norm_stderr\": 0.012101217610223793\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.40441176470588236,\n \"acc_stderr\": 0.029812630701569736,\n \"acc_norm\": 0.40441176470588236,\n \"acc_norm_stderr\": 0.029812630701569736\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.4542483660130719,\n \"acc_stderr\": 0.020142974553795198,\n \"acc_norm\": 0.4542483660130719,\n \"acc_norm_stderr\": 0.020142974553795198\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5909090909090909,\n \"acc_stderr\": 0.04709306978661895,\n \"acc_norm\": 0.5909090909090909,\n \"acc_norm_stderr\": 0.04709306978661895\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.5061224489795918,\n \"acc_stderr\": 0.03200682020163908,\n \"acc_norm\": 0.5061224489795918,\n \"acc_norm_stderr\": 0.03200682020163908\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6368159203980099,\n \"acc_stderr\": 0.03400598505599015,\n \"acc_norm\": 0.6368159203980099,\n \"acc_norm_stderr\": 0.03400598505599015\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252609,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252609\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.43373493975903615,\n 
\"acc_stderr\": 0.038581589406855174,\n \"acc_norm\": 0.43373493975903615,\n \"acc_norm_stderr\": 0.038581589406855174\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.695906432748538,\n \"acc_stderr\": 0.0352821125824523,\n \"acc_norm\": 0.695906432748538,\n \"acc_norm_stderr\": 0.0352821125824523\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.30599755201958384,\n \"mc1_stderr\": 0.016132229728155048,\n \"mc2\": 0.4593111895966161,\n \"mc2_stderr\": 0.015226872222356481\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7419100236779794,\n \"acc_stderr\": 0.012298278833972387\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.18498862774829417,\n \"acc_stderr\": 0.010695390472237925\n }\n}\n```", "repo_url": "https://huggingface.co/Severus27/BeingWell_llama2_7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|arc:challenge|25_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|gsm8k|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hellaswag|10_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T12-58-09.380346.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T12-58-09.380346.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T12-58-09.380346.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T12-58-09.380346.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T12-58-09.380346.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T12-58-09.380346.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T12-58-09.380346.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": 
"2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": 
"2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T12-58-09.380346.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["**/details_harness|winogrande|5_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T12-58-09.380346.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T12_58_09.380346", "path": ["results_2024-01-25T12-58-09.380346.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T12-58-09.380346.parquet"]}]}]}
2024-01-25T13:00:56+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Severus27/BeingWell_llama2_7b Dataset automatically created during the evaluation run of model Severus27/BeingWell_llama2_7b on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T12:58:09.380346(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Severus27/BeingWell_llama2_7b\n\n\n\nDataset automatically created during the evaluation run of model Severus27/BeingWell_llama2_7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T12:58:09.380346(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Severus27/BeingWell_llama2_7b\n\n\n\nDataset automatically created during the evaluation run of model Severus27/BeingWell_llama2_7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T12:58:09.380346(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 189, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Severus27/BeingWell_llama2_7b\n\n\n\nDataset automatically created during the evaluation run of model Severus27/BeingWell_llama2_7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T12:58:09.380346(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
[ -0.04664207622408867, 0.19230426847934723, -0.0048807160928845406, 0.0384647510945797, 0.07777879387140274, -0.013280975632369518, 0.03765905648469925, 0.10414330661296844, 0.0009361917618662119, 0.17888566851615906, -0.025097059085965157, 0.10625964403152466, 0.06628292798995972, 0.12504497170448303, 0.016911238431930542, -0.13909083604812622, 0.025769444182515144, -0.08851450681686401, 0.06813043355941772, 0.07615195214748383, 0.07706394046545029, -0.08793152123689651, 0.06587055325508118, -0.03173022344708443, 0.029295703396201134, -0.005768401548266411, -0.08377544581890106, -0.029902245849370956, 0.10146144032478333, 0.11694304645061493, 0.03284488618373871, -0.01346650067716837, 0.020511353388428688, -0.2701709270477295, 0.012888375669717789, 0.09060859680175781, -0.009464274160563946, 0.03062327206134796, 0.12379718571901321, -0.08163067698478699, 0.06718510389328003, -0.039280638098716736, 0.07360108196735382, 0.055455658584833145, -0.10965287685394287, -0.11527581512928009, -0.14455462992191315, -0.006962292827665806, 0.08453673124313354, 0.037057358771562576, -0.01964089274406433, 0.14046652615070343, -0.04709857329726219, 0.046227067708969116, 0.14401108026504517, -0.1004820466041565, -0.018090739846229553, 0.054745957255363464, 0.009067329578101635, 0.06849142909049988, -0.0756465345621109, -0.012653544545173645, 0.03649420291185379, 0.04668048024177551, 0.01983644813299179, 0.009898530319333076, -0.02544628269970417, 0.014574241824448109, -0.1522367000579834, -0.12861910462379456, 0.16471345722675323, 0.016699114814400673, -0.03899550810456276, -0.1863483488559723, -0.029145147651433945, 0.01303856261074543, 0.006261778064072132, -0.028623320162296295, 0.005178072955459356, -0.027914080768823624, 0.10004672408103943, -0.017670372501015663, -0.09787330031394958, -0.04303795471787453, 0.006750728935003281, 0.062186699360609055, 0.029245946556329727, -0.010524610988795757, 0.005238933023065329, 0.10426529496908188, -0.011727266944944859, 
-0.057499442249536514, -0.06817036867141724, -0.05679231882095337, -0.11549807339906693, -0.029110923409461975, 0.01132306270301342, -0.09166130423545837, 0.03751827031373978, 0.25095632672309875, -0.011102326214313507, 0.023794081062078476, -0.08157837390899658, 0.018867772072553635, 0.11583838611841202, 0.0401935875415802, -0.07070327550172806, -0.04293376952409744, -0.02853574976325035, 0.025157319381833076, 0.04257309064269066, -0.021126259118318558, 0.010187334381043911, 0.06277208775281906, 0.029379205778241158, 0.10137610882520676, 0.12438547611236572, 0.037069641053676605, -0.07363402843475342, -0.02086784690618515, 0.22853578627109528, -0.13867546617984772, -0.022637026384472847, 0.010869580321013927, -0.043227218091487885, -0.13598626852035522, 0.09403879195451736, 0.009827305562794209, -0.043660614639520645, 0.1414501816034317, -0.03588775172829628, -0.08764931559562683, -0.07549681514501572, -0.05460863932967186, 0.05131004378199577, 0.003843925194814801, -0.03628448769450188, -0.09385193139314651, -0.08955395221710205, -0.08876295387744904, 0.02394082583487034, -0.052687641233205795, -0.03312602639198303, 0.01597917452454567, -0.004049734212458134, -0.017651475965976715, -0.022329717874526978, 0.09168920665979385, -0.059426721185445786, 0.025354795157909393, 0.0015233933227136731, 0.019165365025401115, 0.07614661753177643, 0.03990460932254791, -0.112111397087574, 0.0822373554110527, -0.11829826980829239, 0.09088001400232315, -0.11857035011053085, -0.005184717010706663, -0.1330120712518692, -0.006222758442163467, -0.021813329309225082, 0.036139652132987976, -0.014633805491030216, 0.10205406695604324, -0.22768676280975342, 0.0037148722913116217, 0.14116978645324707, -0.1169452965259552, -0.10755370557308197, 0.0869046002626419, -0.044486161321401596, 0.056732937693595886, 0.04363420978188515, 0.12107998132705688, 0.11911327391862869, -0.05930357053875923, -0.10434474796056747, -0.07740616053342819, -0.027036551386117935, 0.1434832662343979, 
0.07415182888507843, -0.0821414440870285, 0.11881571263074875, 0.04116969183087349, -0.010317218489944935, -0.09282960742712021, -0.004235586151480675, -0.0653759017586708, -0.012838339433073997, -0.07295265793800354, -0.06006687507033348, -0.013140888884663582, -0.07716505229473114, -0.01173695269972086, -0.07809813320636749, 0.013000783510506153, 0.09215878695249557, -0.025563344359397888, 0.01993933692574501, -0.06953628361225128, 0.04790303483605385, 0.021575704216957092, 0.014382670633494854, -0.20742900669574738, -0.11090333014726639, 0.021627089008688927, -0.15131700038909912, 0.04467353597283363, 0.03665237873792648, 0.013342936523258686, 0.039986319839954376, -0.000966181920375675, 0.024862078949809074, 0.02484235167503357, -0.003784656058996916, -0.017866920679807663, -0.14100883901119232, -0.0636916235089302, -0.0771913081407547, 0.06728062033653259, -0.12451032549142838, -0.007575143128633499, 0.08060788363218307, 0.16511376202106476, 0.016171736642718315, -0.08376449346542358, 0.06839663535356522, 0.01127108745276928, -0.0414821058511734, -0.04322082921862602, 0.0046390932984650135, -0.027238136157393456, 0.01632009632885456, 0.029821887612342834, -0.20048387348651886, -0.1274581104516983, 0.06898249685764313, 0.11240214109420776, -0.05770719423890114, -0.0939035564661026, -0.07318483293056488, -0.06749632209539413, -0.09201619029045105, -0.07524530589580536, 0.07081016153097153, 0.08398821204900742, 0.045417577028274536, -0.060294605791568756, -0.06202371418476105, 0.007778452709317207, 0.048329103738069534, -0.06603918224573135, 0.11294783651828766, 0.0856780856847763, -0.040103524923324585, 0.10007259249687195, -0.035810988396406174, 0.09638481587171555, 0.052409540861845016, 0.033250629901885986, -0.0922604650259018, 0.012740161269903183, 0.04966942220926285, 0.04778853431344032, 0.06919831037521362, -0.053560927510261536, 0.02780560590326786, 0.07572552561759949, 0.005035179667174816, 0.03924960270524025, -0.06156139075756073, 
0.030585769563913345, 0.04673681780695915, 0.0017532941419631243, 0.04597659409046173, 0.017091622576117516, -0.0073873321525752544, 0.06999853998422623, 0.02630077674984932, 0.08245651423931122, -0.01919727213680744, -0.05765404552221298, -0.1055675819516182, 0.1390553116798401, -0.07966507226228714, -0.26271921396255493, -0.16219577193260193, -0.02755526825785637, -0.048105984926223755, -0.00923348031938076, 0.07651377469301224, -0.008447924628853798, -0.10271233320236206, -0.11137612909078598, 0.04348763823509216, 0.02101299911737442, -0.1117745190858841, -0.05907278135418892, 0.06008411571383476, 0.009647388942539692, -0.16249315440654755, 0.04020938649773598, 0.04701235890388489, -0.05432678014039993, -0.00666649267077446, 0.08473607897758484, 0.14044198393821716, 0.07272236049175262, 0.06598389893770218, -0.026173831894993782, -0.01258607767522335, 0.18316644430160522, -0.0964716374874115, 0.017572782933712006, 0.11991146206855774, -0.06392396241426468, 0.06628311425447464, 0.1765003502368927, 0.014416484162211418, -0.10126796364784241, 0.055201176553964615, 0.10123404860496521, -0.07275275141000748, -0.23772501945495605, -0.1151212677359581, -0.015217026695609093, 0.02209506742656231, 0.1047925353050232, 0.06477779150009155, -0.003330240026116371, 0.01705615036189556, -0.1191219687461853, -0.020740482956171036, -0.042353805154561996, 0.07265046238899231, 0.05644262954592705, -0.011386443860828876, 0.045493822544813156, -0.03396277129650116, 0.023079248145222664, 0.11782725900411606, 0.04896627366542816, 0.13849131762981415, -0.037259191274642944, 0.1799854338169098, 0.09805494546890259, 0.08252423256635666, -0.04735579341650009, 0.0301777645945549, -0.013022101484239101, 0.07711014151573181, -0.012467673979699612, -0.09956757724285126, -0.065056212246418, 0.11926604062318802, 0.011823675595223904, -0.07282045483589172, 0.012813764624297619, -0.04922197014093399, 0.047824691981077194, 0.20072035491466522, -0.024875562638044357, -0.13594746589660645, 
-0.056866955012083054, 0.06400328874588013, -0.035917386412620544, -0.06872180849313736, -0.005416381172835827, 0.07811237126588821, -0.13560327887535095, 0.020797880366444588, -0.032383862882852554, 0.08052883297204971, -0.15054939687252045, -0.017200954258441925, -0.017542660236358643, 0.02667163498699665, 0.006662619300186634, 0.11315099149942398, -0.1448129415512085, 0.11183001101016998, -0.004076662007719278, 0.0032303831540048122, -0.10383445769548416, 0.044716957956552505, -0.05677473172545433, -0.04701453819870949, 0.14195750653743744, -0.01478147879242897, -0.10376398265361786, -0.05139962583780289, -0.11138800531625748, -0.0069471802562475204, 0.06134303659200668, -0.12783654034137726, 0.10315453261137009, 0.02772788517177105, -0.019678160548210144, -0.0266053956001997, -0.013371650129556656, -0.10635242611169815, -0.23248492181301117, 0.10606801509857178, -0.10033924132585526, 0.0627305656671524, -0.06651191413402557, -0.039404209703207016, -0.048699986189603806, 0.1664075404405594, -0.10842792689800262, -0.05880722403526306, -0.11071858555078506, 0.008735104463994503, 0.16530875861644745, -0.04501672461628914, 0.06760317832231522, -0.02869817055761814, 0.1694411039352417, -0.013920167461037636, -0.04336920753121376, -0.0037646715063601732, -0.09701258689165115, -0.1831168532371521, -0.04833805561065674, 0.11422325670719147, 0.06819892674684525, 0.01573796197772026, -0.003643066855147481, 0.026348112151026726, 0.0228264220058918, -0.093579962849617, 0.03397710993885994, 0.1581142693758011, 0.12582160532474518, 0.031402938067913055, -0.016199396923184395, -0.08759909868240356, -0.10078293085098267, -0.10755263268947601, 0.05457612872123718, 0.16932375729084015, -0.070565365254879, 0.1678985208272934, 0.1317349225282669, -0.0988883525133133, -0.18512137234210968, -0.07425075024366379, 0.025093043223023415, -0.020063230767846107, 0.1235959529876709, -0.1824779361486435, 0.05568930134177208, 0.06901068240404129, -0.024603407829999924, 0.05685442313551903, 
-0.27723103761672974, -0.14364531636238098, 0.02322438359260559, 0.04412322863936424, -0.2234189212322235, -0.17606393992900848, -0.111672542989254, -0.04579298570752144, -0.14285965263843536, 0.11060883104801178, 0.010176698677241802, 0.01651354692876339, -0.017040356993675232, 0.07541216164827347, 0.05662403255701065, -0.06504689902067184, 0.1342087835073471, -0.00127067556604743, 0.019065288826823235, -0.11074315756559372, -0.014223765581846237, -0.019535766914486885, -0.052431706339120865, 0.09458689391613007, 0.02508031763136387, 0.059955596923828125, -0.09247402846813202, -0.036534976214170456, -0.0441250279545784, 0.04734070226550102, -0.0642419382929802, -0.05217895656824112, -0.07135274261236191, 0.07745226472616196, 0.0918596088886261, -0.006185018923133612, 0.03765702620148659, -0.04135933890938759, 0.028450872749090195, 0.20697447657585144, 0.1257595270872116, 0.0404968336224556, -0.13031096756458282, -0.026822876185178757, -0.012531742453575134, -0.0026839962229132652, -0.12488239258527756, 0.043922409415245056, 0.09156946837902069, 0.040878672152757645, 0.07780846208333969, -0.02124081924557686, -0.18407176434993744, 0.010386168025434017, 0.0891483947634697, -0.11958339065313339, -0.21413283050060272, 0.032712046056985855, 0.1513545662164688, -0.1617565155029297, -0.04736091569066048, 0.09218286722898483, 0.005047791171818972, -0.0281220730394125, -0.0022647278383374214, 0.07887569814920425, 0.04929333180189133, 0.10684002935886383, 0.018122544512152672, 0.05015880987048149, -0.06182234734296799, 0.10210894048213959, 0.14375752210617065, -0.1195698231458664, 0.02513214945793152, 0.034407150000333786, -0.06549891084432602, -0.06347870826721191, 0.004423022735863924, -0.017313960939645767, 0.01011720485985279, -0.046914152801036835, 0.00982652697712183, 0.000007405055839626584, 0.04209263250231743, 0.14560015499591827, 0.011964751407504082, 0.04476066678762436, 0.0357346385717392, -0.000029118980819475837, -0.06178081035614014, 0.09871511161327362, 
0.03304068744182587, 0.047446902841329575, -0.05737512558698654, 0.02650705724954605, 0.008894061669707298, -0.017132123932242393, 0.01771378703415394, -0.030184485018253326, -0.07192429900169373, 0.007705627474933863, -0.1543593555688858, 0.05010305345058441, -0.0755474790930748, 0.008849686942994595, 0.007805350702255964, -0.018206514418125153, -0.003957875072956085, 0.009849677793681622, -0.061279911547899246, -0.052685946226119995, -0.04957002401351929, 0.1251172423362732, -0.20102934539318085, -0.00816257856786251, 0.09556829184293747, -0.06347829103469849, 0.07271609455347061, -0.011876893229782581, -0.016413137316703796, 0.01611560583114624, -0.07567748427391052, -0.0006813177024014294, -0.019243838265538216, 0.058575522154569626, 0.011403821408748627, -0.1387263983488083, -0.028028665110468864, 0.0015801388071849942, -0.08879972994327545, -0.011719259433448315, 0.048342689871788025, -0.15686918795108795, 0.02931877039372921, 0.08136342465877533, -0.035344649106264114, -0.04954144358634949, 0.03279488533735275, 0.04699146747589111, 0.015885308384895325, 0.09719832241535187, -0.00034553141449578106, 0.04272369295358658, -0.14083631336688995, -0.041126176714897156, 0.00583650404587388, 0.004269903060048819, 0.024133261293172836, 0.02166382409632206, 0.032812248915433884, -0.006131727714091539, 0.22127024829387665, 0.0001180207182187587, 0.07080084830522537, 0.03165756165981293, -0.020706970244646072, -0.01263697911053896, 0.040943827480077744, 0.019861798733472824, 0.007667523343116045, 0.03179897740483284, 0.027815548703074455, -0.02093484066426754, -0.052091605961322784, -0.04789571091532707, 0.06188281625509262, 0.13909292221069336, 0.1391899585723877, -0.038651637732982635, 0.07634586095809937, -0.161446675658226, -0.05459298565983772, 0.007474848534911871, -0.03475936874747276, 0.05039985850453377, -0.07900724560022354, 0.04768548160791397, 0.07369904965162277, -0.09896460175514221, 0.13999633491039276, -0.059907566756010056, -0.033892471343278885, 
-0.04744693636894226, -0.150461807847023, -0.046453867107629776, 0.0008663009502924979, 0.009748744778335094, -0.09520502388477325, 0.09579898416996002, 0.13647963106632233, -0.015146818943321705, 0.007413117215037346, 0.08844608068466187, -0.06314683705568314, -0.04331415146589279, -0.041021429002285004, -0.0014176992699503899, 0.004782079719007015, -0.004123741295188665, 0.06632658839225769, 0.021793480962514877, 0.06486546248197556, 0.05092715471982956, 0.09395890682935715, 0.039609283208847046, 0.008430772460997105, -0.030182993039488792, -0.06109561398625374, -0.010047998279333115, -0.01706419326364994, -0.045512646436691284, 0.19808568060398102, 0.06280198693275452, 0.013115277513861656, 0.008943420834839344, 0.21565856039524078, -0.006139041855931282, -0.04797119274735451, -0.13469551503658295, 0.10449257493019104, -0.0003640323120635003, 0.020266152918338776, 0.020202670246362686, -0.1377446949481964, 0.029482673853635788, 0.16934441030025482, 0.10839907079935074, 0.04193122684955597, 0.013792668469250202, 0.028958464041352272, 0.02628728747367859, -0.021912744268774986, 0.03244529664516449, 0.031928740441799164, 0.1989481896162033, -0.05397205054759979, 0.05337517708539963, -0.0020973493810743093, -0.000303555658319965, -0.01846521534025669, 0.09062310308218002, -0.04123547673225403, 0.024258527904748917, -0.05927164480090141, 0.10062719136476517, -0.05334116891026497, -0.2704649865627289, -0.02938821353018284, -0.08777573704719543, -0.11974038183689117, -0.015072801150381565, 0.03169409930706024, -0.024006176739931107, 0.04175404831767082, 0.03205869719386101, -0.03325595334172249, 0.19963569939136505, 0.011083975434303284, -0.09338503330945969, -0.06977476179599762, 0.05814381688833237, -0.011607829481363297, 0.266622394323349, -0.009503432549536228, 0.06892925500869751, 0.09013786911964417, -0.019443221390247345, -0.14322295784950256, 0.009709488600492477, 0.10010305047035217, -0.043853942304849625, 0.047632526606321335, 0.16471898555755615, 
-0.032923854887485504, 0.1647922247648239, 0.03529280051589012, -0.039388012140989304, 0.061814095824956894, 0.06607507914304733, 0.04409095644950867, -0.09263890236616135, 0.08171725273132324, -0.08855945616960526, 0.1409619301557541, 0.10685694217681885, -0.030307766050100327, -0.0025077632162719965, -0.051596831530332565, 0.06300856173038483, -0.03978700563311577, 0.12274326384067535, -0.003916438203305006, -0.16371281445026398, 0.03442566841840744, 0.022672947496175766, 0.047127414494752884, -0.2170409858226776, -0.06983339786529541, 0.12836207449436188, -0.03801973536610603, 0.021390270441770554, 0.08377755433320999, 0.05620435252785683, 0.0029124452266842127, -0.0745227262377739, -0.0809645876288414, 0.002563253277912736, 0.12146618217229843, -0.0925910472869873, -0.051758844405412674 ]
fb3518c2404d11ae931950507a982b381b032ede
This MCQ enables to evaluate models on the particular scope of maisons France services. This v1 is generated and improved thanks to non-expert knowledge.
AgentPublic/MCQ-eval
[ "license:etalab-2.0", "region:us" ]
2024-01-25T13:40:30+00:00
{"license": "etalab-2.0"}
2024-01-25T13:52:49+00:00
[]
[]
TAGS #license-etalab-2.0 #region-us
This MCQ enables to evaluate models on the particular scope of maisons France services. This v1 is generated and improved thanks to non-expert knowledge.
[]
[ "TAGS\n#license-etalab-2.0 #region-us \n" ]
[ 14 ]
[ "passage: TAGS\n#license-etalab-2.0 #region-us \n" ]
[ 0.010522027499973774, 0.12321154028177261, -0.00952222477644682, -0.008732665330171585, 0.015924453735351562, 0.050910428166389465, 0.18831148743629456, 0.05463062971830368, 0.19867603480815887, -0.04927174374461174, 0.12824665009975433, 0.102229543030262, -0.015729598701000214, 0.1834474354982376, -0.026150189340114594, -0.11243932694196701, 0.042105499655008316, -0.01645311340689659, 0.08917203545570374, 0.0022811181843280792, 0.028168698772788048, -0.04584706574678421, 0.03410365805029869, -0.050161346793174744, -0.04317186772823334, 0.05371161922812462, 0.061267461627721786, -0.0665600448846817, 0.08993995934724808, -0.00833035446703434, 0.1204143762588501, 0.06424582004547119, 0.007543379906564951, -0.2553959786891937, -0.0006873371894471347, -0.06436537951231003, -0.12547099590301514, 0.032622840255498886, 0.007632835768163204, 0.020193491131067276, 0.06354686617851257, 0.1185949519276619, -0.00801101978868246, 0.010265066288411617, -0.20369206368923187, -0.19919849932193756, -0.13284920156002045, -0.010132775641977787, 0.06262820959091187, 0.01748054288327694, 0.08268178254365921, 0.179743692278862, -0.09692546725273132, -0.002849761862307787, 0.08129411190748215, -0.3520142734050751, 0.07853397727012634, 0.06936454027891159, 0.043669648468494415, 0.10652217268943787, 0.0036756694316864014, 0.07447439432144165, 0.13046658039093018, -0.03322095796465874, -0.047868598252534866, -0.06886334717273712, -0.07813366502523422, 0.11028865724802017, -0.02288181334733963, -0.09284117817878723, 0.31523701548576355, 0.021341556683182716, -0.04725536331534386, 0.1435658186674118, -0.019560065120458603, -0.008384284563362598, 0.04234050214290619, -0.015734035521745682, 0.08021882176399231, 0.11729402095079422, 0.16267834603786469, -0.07979188859462738, -0.18636681139469147, -0.035346850752830505, -0.26640355587005615, 0.1238214448094368, 0.000871302851010114, 0.12984517216682434, -0.17442572116851807, -0.008444358594715595, -0.19631987810134888, -0.028212852776050568, 
-0.06799594312906265, -0.05607912316918373, 0.10545918345451355, -0.002212940715253353, -0.012748714536428452, 0.13664184510707855, 0.05457622557878494, 0.2537224292755127, -0.06881800293922424, -0.008854789659380913, -0.0946672260761261, 0.1731802076101303, -0.017915483564138412, 0.0488339327275753, 0.1965263932943344, 0.057318370789289474, 0.04439804330468178, -0.15305426716804504, 0.09133147448301315, -0.029059022665023804, -0.19356676936149597, -0.005790548399090767, -0.157599538564682, 0.1551453024148941, -0.0278916135430336, -0.13914351165294647, -0.10283786058425903, 0.07037805765867233, 0.11096431314945221, -0.019544346258044243, 0.03300859034061432, -0.0055356333032250404, 0.004166181664913893, -0.10224751383066177, -0.038736626505851746, 0.03575993329286575, 0.09499481320381165, 0.05933450162410736, -0.12831319868564606, -0.014884280040860176, -0.0038103838451206684, 0.03608099743723869, 0.15299703180789948, -0.11700001358985901, 0.05920926108956337, -0.13627828657627106, -0.17392677068710327, 0.05276240035891533, 0.02989843487739563, -0.024920783936977386, 0.057363349944353104, 0.04076452553272247, 0.07190781086683273, -0.03001696802675724, -0.11077133566141129, -0.1666228175163269, -0.08929000049829483, 0.06611588597297668, -0.025618065148591995, -0.0005246748332865536, -0.2745715379714966, -0.018654821440577507, -0.1277245730161667, 0.0489107221364975, -0.0006162791978567839, -0.12576007843017578, -0.09597741067409515, 0.12842273712158203, -0.028721213340759277, 0.031822338700294495, -0.09106096625328064, 0.023642443120479584, -0.009995569474995136, 0.07758326083421707, -0.10764005780220032, -0.06289495527744293, 0.10817799717187881, -0.1530819982290268, -0.15744633972644806, 0.0159804318100214, -0.007357580587267876, 0.06570841372013092, 0.06138971075415611, 0.3643292784690857, -0.10338921844959259, -0.14255055785179138, 0.09267732501029968, 0.1272915154695511, -0.13307343423366547, -0.2770015299320221, 0.14654670655727386, -0.13320155441761017, 
-0.10571514815092087, -0.008460228331387043, -0.022303111851215363, 0.047861531376838684, -0.05120791867375374, -0.06847751140594482, 0.03563319519162178, -0.0056641059927642345, 0.016672490164637566, -0.016000928357243538, 0.06389850378036499, -0.07957662642002106, 0.06273537874221802, 0.06149664148688316, 0.012590896338224411, 0.13590584695339203, 0.03472539782524109, -0.07561908662319183, 0.0823044404387474, -0.003879725933074951, -0.044262226670980453, -0.024816030636429787, -0.0792536586523056, 0.0331086702644825, -0.017943404614925385, 0.11242619156837463, 0.14478713274002075, 0.023781435564160347, -0.029026899486780167, 0.011211562901735306, 0.032220035791397095, -0.012587330304086208, 0.04387868195772171, 0.021285325288772583, -0.07222257554531097, 0.06838327646255493, -0.05039684474468231, -0.05147368088364601, -0.05557207763195038, -0.0340900793671608, 0.02966914512217045, -0.05191661790013313, 0.017095528542995453, 0.07043114304542542, -0.06477517634630203, -0.016867155209183693, -0.003543168306350708, 0.021729685366153717, 0.11124054342508316, 0.006777151022106409, -0.06107543036341667, 0.1629398763179779, -0.05235163867473602, 0.25800177454948425, 0.162602037191391, -0.07749491184949875, 0.03306430205702782, -0.08243013173341751, 0.017431987449526787, 0.00317067326977849, 0.06408310681581497, 0.009863801300525665, -0.033721160143613815, -0.04082696884870529, 0.05030733719468117, -0.05616471543908119, 0.04092109948396683, -0.026618581265211105, -0.10544238984584808, -0.1304980218410492, 0.016236795112490654, 0.2618745267391205, -0.20738044381141663, 0.12204840779304504, 0.44997310638427734, 0.08096322417259216, 0.16065208613872528, -0.08220897614955902, -0.033638231456279755, -0.08559396862983704, -0.005184912122786045, -0.023689432069659233, 0.14581915736198425, -0.05600970238447189, 0.019755147397518158, 0.053209345787763596, 0.05887797102332115, 0.06858951598405838, -0.19783443212509155, -0.12669771909713745, -0.006788340397179127, 
-0.04027458652853966, -0.1874428540468216, 0.024554571136832237, -0.07602055370807648, 0.037397682666778564, 0.030918210744857788, -0.13127924501895905, 0.1452874392271042, -0.004021414555609226, -0.07660964876413345, 0.07223322242498398, -0.1998240053653717, -0.10355881601572037, -0.21567749977111816, -0.1099078357219696, 0.051788218319416046, 0.06940604001283646, 0.09467636048793793, 0.003937711473554373, -0.06274440884590149, 0.03340012580156326, -0.025482578203082085, -0.11990901082754135, -0.03612736985087395, 0.04911106079816818, 0.0920034795999527, -0.048546791076660156, -0.10161381214857101, -0.0741739496588707, -0.04930587112903595, 0.012806958518922329, 0.07512509822845459, -0.1058899536728859, 0.0812147930264473, 0.11672309041023254, -0.013399677351117134, 0.038624875247478485, -0.06792197376489639, 0.12017250061035156, -0.020337004214525223, -0.10230222344398499, 0.1352948397397995, -0.019527561962604523, 0.0581996887922287, 0.1530035138130188, 0.06372879445552826, -0.11287757009267807, -0.019248362630605698, -0.10621673613786697, -0.13400520384311676, -0.2954043447971344, -0.0492330938577652, -0.06873840093612671, 0.13035735487937927, 0.0078100478276610374, 0.08288692682981491, 0.09244151413440704, 0.022418566048145294, 0.08927451074123383, 0.017176492139697075, -0.034352097660303116, 0.051241107285022736, 0.20384147763252258, -0.02720056287944317, -0.016052134335041046, -0.1372879147529602, 0.015130423940718174, 0.19365447759628296, 0.11492323875427246, 0.18476267158985138, 0.3129860758781433, 0.10898337513208389, 0.12383866310119629, 0.11650354415178299, 0.10274460166692734, 0.06261476874351501, 0.04669094830751419, 0.0033595028799027205, -0.08168806880712509, -0.005983837880194187, 0.0027388299349695444, 0.08998528122901917, 0.0267320666462183, -0.19892214238643646, 0.009486321359872818, -0.20840290188789368, -0.00433061458170414, -0.07968339323997498, 0.13229233026504517, -0.08074229210615158, 0.1242629662156105, 0.10427795350551605, 
0.05585578456521034, -0.033828333020210266, 0.10815601795911789, -0.02340179868042469, -0.037969544529914856, 0.07814594358205795, 0.040279287844896317, 0.06049076095223427, 0.09290051460266113, 0.03645368292927742, -0.09839999675750732, -0.1563061773777008, 0.045388367027044296, 0.11398477107286453, -0.17032530903816223, 0.33642953634262085, 0.0248546339571476, -0.0708823949098587, -0.001570883672684431, -0.06720402091741562, 0.013092049397528172, 0.17187874019145966, 0.10438801348209381, 0.04143481329083443, -0.23325875401496887, -0.11194349825382233, -0.016536718234419823, -0.015620862133800983, 0.09805824607610703, 0.016212940216064453, -0.10115690529346466, -0.04106449335813522, 0.05893118679523468, 0.0077683571726083755, 0.11987990885972977, -0.045047782361507416, -0.05033420771360397, -0.002001536078751087, 0.11793717741966248, -0.039270903915166855, -0.04350544884800911, 0.027324650436639786, -0.018717117607593536, 0.08363766223192215, -0.10466895997524261, 0.03280507028102875, -0.054014526307582855, -0.22248686850070953, 0.04395744577050209, -0.05825147405266762, 0.014430548995733261, -0.022985080257058144, -0.0920945554971695, -0.09024175256490707, -0.16442859172821045, 0.10466236621141434, -0.03268670290708542, 0.03155532106757164, -0.06262237578630447, 0.10947249829769135, -0.1126529648900032, 0.0359562486410141, 0.016671741381287575, 0.03387906402349472, -0.018341057002544403, -0.08890911191701889, 0.1064932644367218, -0.06888726353645325, 0.04004068300127983, 0.043152857571840286, -0.054217614233493805, 0.020743723958730698, 0.052542202174663544, -0.08926168829202652, 0.16478048264980316, 0.435123085975647, -0.04156995937228203, 0.22245226800441742, 0.30429795384407043, -0.1582474708557129, -0.2157389521598816, -0.1287916600704193, -0.24072211980819702, -0.08133457601070404, 0.1665303260087967, -0.1611458957195282, 0.008700188249349594, 0.18407738208770752, -0.11609715968370438, 0.2604495584964752, -0.14344674348831177, -0.06179102882742882, 
0.12929929792881012, -0.045094143599271774, 0.4686415493488312, -0.0863395407795906, -0.1266855150461197, -0.0456378273665905, -0.17664273083209991, 0.15554915368556976, -0.03719055652618408, 0.05591331049799919, 0.003609764389693737, -0.006228064652532339, -0.026151319965720177, -0.003798331134021282, 0.22259201109409332, -0.04367952421307564, 0.10133056342601776, -0.06959540396928787, -0.13777105510234833, 0.21876198053359985, -0.04394279420375824, 0.031038351356983185, -0.03795163333415985, -0.04900386556982994, -0.09369988739490509, 0.0037004773039370775, 0.009950301609933376, 0.08671604096889496, 0.018842872232198715, -0.07776308804750443, -0.09339136630296707, -0.005549993831664324, -0.11706814169883728, -0.03678738325834274, 0.355413556098938, 0.03913413733243942, 0.04030286893248558, 0.1167459487915039, -0.018768474459648132, -0.14395321905612946, 0.0639910027384758, -0.06507387012243271, -0.08753174543380737, 0.038472600281238556, -0.1385446935892105, 0.012525391764938831, 0.1067148894071579, -0.03682457655668259, 0.08757415413856506, 0.07300712168216705, -0.08834418654441833, 0.05077783763408661, 0.18347303569316864, -0.06425706297159195, -0.07314692437648773, 0.050560470670461655, 0.012948719784617424, 0.15516485273838043, 0.0637596845626831, 0.08913002908229828, 0.056491076946258545, 0.013633610680699348, -0.008467177860438824, 0.023586289957165718, -0.13246704638004303, -0.01613611727952957, 0.04475168138742447, -0.03172538802027702, -0.09922332316637039, 0.21864792704582214, 0.06297308951616287, 0.009673544205725193, -0.010914335958659649, 0.06113145872950554, -0.02313625067472458, -0.06552477926015854, -0.1459077149629593, 0.0672338455915451, -0.21179509162902832, -0.13535238802433014, -0.0018222638173028827, -0.04735197126865387, -0.013356863521039486, 0.06256521493196487, 0.05242949724197388, 0.10570299625396729, 0.029455313459038734, 0.017615532502532005, 0.12789909541606903, -0.08174478262662888, -0.1651628017425537, 0.024944579228758812, 
-0.08970192074775696, -0.035024430602788925, 0.005046182312071323, 0.07955920696258545, -0.06113743782043457, -0.0670204609632492, -0.136760413646698, 0.08724167943000793, -0.10505954176187515, 0.0482604056596756, -0.06649678945541382, -0.031083276495337486, 0.06146158277988434, -0.06704714894294739, -0.09089143574237823, 0.005082832649350166, -0.1795457899570465, 0.03956564515829086, 0.01843278855085373, 0.08707796037197113, -0.07444344460964203, -0.058391764760017395, 0.09179803729057312, 0.06930097937583923, 0.09159992635250092, 0.06714819371700287, 0.050123460590839386, 0.17544293403625488, -0.12234457582235336, -0.03041679412126541, 0.1236361414194107, 0.02367466315627098, 0.035877496004104614, 0.06106803938746452, -0.05794839560985565, 0.0895707979798317, -0.04087306186556816, 0.03244805335998535, -0.11603254824876785, -0.13890840113162994, -0.1127425953745842, 0.00162024877499789, -0.26011598110198975, 0.03452039137482643, -0.15362511575222015, 0.11396796256303787, -0.00914730317890644, 0.1576332449913025, 0.07289867103099823, -0.025364935398101807, 0.02555982954800129, 0.016217513009905815, 0.00797333661466837, -0.06784240156412125, -0.11918173730373383, -0.06540174037218094, -0.10291293263435364, -0.01160176657140255, 0.3574490249156952, 0.04922269284725189, -0.13610225915908813, 0.046309709548950195, 0.025022562593221664, 0.006156100891530514, -0.014280570670962334, 0.2269570231437683, 0.03590117022395134, -0.008949246257543564, -0.20109564065933228, 0.015807321295142174, -0.015311209484934807, -0.15945924818515778, 0.14870119094848633, 0.10647079348564148, 0.05138605833053589, 0.07510961592197418, 0.10358062386512756, -0.0704946294426918, -0.09912285208702087, -0.1350184828042984, 0.1247386559844017, 0.00848048459738493, 0.07938885688781738, 0.06937912851572037, 0.18140238523483276, -0.05677294731140137, 0.012117680162191391, -0.06033721938729286, 0.013234402053058147, -0.19497545063495636, -0.10873565077781677, -0.03065650910139084, 
-0.11971284449100494, 0.07081177085638046, -0.006348669063299894, 0.09076568484306335, 0.18297138810157776, 0.013929832726716995, -0.05831039696931839, -0.09828504920005798, -0.16489627957344055, -0.07737546414136887, -0.03938708081841469, 0.0057807015255093575, 0.015406979247927666, -0.12565594911575317, -0.10328246653079987, -0.02042802795767784, -0.12776058912277222, -0.046642646193504333, 0.0147533118724823, 0.03497692570090294, -0.0322096161544323, -0.10800990462303162, -0.024415621533989906, -0.0683293342590332, 0.08485029637813568, -0.030202627182006836, 0.10241378843784332, 0.004461515229195356, 0.033296555280685425, 0.08856245130300522, 0.0808705985546112, -0.022330766543745995, -0.06672833114862442, 0.026088403537869453, 0.16599929332733154, 0.049957145005464554, 0.15192024409770966, -0.056909024715423584, -0.03670070692896843, -0.04212838038802147, 0.15731409192085266, 0.23045536875724792, -0.04021279513835907, -0.015913933515548706, -0.024463782086968422, 0.03626824915409088, 0.04910401999950409, 0.17388202250003815, 0.010864146053791046, 0.2331053465604782, -0.050372716039419174, -0.03661824390292168, -0.027275986969470978, 0.11319870501756668, -0.06958403438329697, 0.05500836670398712, -0.004718523006886244, -0.07744739204645157, -0.10703069716691971, 0.09792939573526382, -0.06602016091346741, 0.03680628538131714, 0.1319778561592102, -0.1210411787033081, 0.013998948968946934, 0.008075199089944363, 0.1902293860912323, -0.02239890955388546, 0.08057212084531784, -0.11424977332353592, -0.11318957805633545, -0.07165303081274033, 0.030709438025951385, -0.3611427843570709, -0.15611232817173004, 0.09083586931228638, 0.04857003316283226, 0.23370353877544403, -0.002046472392976284, 0.13400141894817352, 0.006030190270394087, 0.06793951243162155, -0.0474272146821022, 0.15353447198867798, 0.04222545400261879, -0.007763159926980734, -0.08268328756093979, -0.16897447407245636, -0.04710531234741211, -0.021018853411078453, 0.059405889362096786, 0.0336456373333931, 
-0.0035022078081965446, 0.20087164640426636, -0.14205655455589294, -0.008952822536230087, -0.030630774796009064, -0.1504337191581726, 0.02747521363198757, -0.04835403710603714, 0.005006094463169575, -0.056701548397541046, -0.028780363500118256, 0.01059721503406763, 0.12821903824806213, -0.1670154184103012, -0.05147016420960426, 0.12021618336439133, -0.019106531515717506, 0.18687361478805542, 0.003986142110079527, -0.12279260158538818, -0.009823054075241089, -0.12799090147018433, 0.10385113209486008, -0.13001567125320435, 0.04841369763016701, 0.12280058860778809, -0.0056307679042220116, 0.006339730229228735, -0.2924460172653198, 0.030595295131206512, -0.008078532293438911, -0.03244485706090927, -0.07463748008012772 ]
fee49f7fc4f7ff101bf935ec04cbef405f00c11a
# Dataset Card for Evaluation run of dball/zephyr-7b-sft-qlora <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [dball/zephyr-7b-sft-qlora](https://huggingface.co/dball/zephyr-7b-sft-qlora) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_dball__zephyr-7b-sft-qlora", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T13:46:13.704337](https://huggingface.co/datasets/open-llm-leaderboard/details_dball__zephyr-7b-sft-qlora/blob/main/results_2024-01-25T13-46-13.704337.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.616234422874079, "acc_stderr": 0.032638077229522285, "acc_norm": 0.6220964683837684, "acc_norm_stderr": 0.03330602977644688, "mc1": 0.27539779681762544, "mc1_stderr": 0.01563813566777552, "mc2": 0.4232205129895114, "mc2_stderr": 0.014379155129720761 }, "harness|arc:challenge|25": { "acc": 0.5639931740614335, "acc_stderr": 0.014491225699230916, "acc_norm": 0.5972696245733788, "acc_norm_stderr": 0.014332236306790147 }, "harness|hellaswag|10": { "acc": 0.6214897430790679, "acc_stderr": 0.004840244782805302, "acc_norm": 0.8249352718581956, "acc_norm_stderr": 0.0037924580005234405 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542129, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542129 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5777777777777777, "acc_stderr": 0.04266763404099582, "acc_norm": 0.5777777777777777, "acc_norm_stderr": 0.04266763404099582 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6644736842105263, "acc_stderr": 0.03842498559395268, "acc_norm": 0.6644736842105263, "acc_norm_stderr": 0.03842498559395268 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6716981132075471, "acc_stderr": 0.02890159361241178, "acc_norm": 0.6716981132075471, "acc_norm_stderr": 0.02890159361241178 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6805555555555556, "acc_stderr": 0.038990736873573344, "acc_norm": 0.6805555555555556, "acc_norm_stderr": 0.038990736873573344 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, 
"acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6011560693641619, "acc_stderr": 0.0373362665538351, "acc_norm": 0.6011560693641619, "acc_norm_stderr": 0.0373362665538351 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3431372549019608, "acc_stderr": 0.04724007352383887, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.04724007352383887 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.79, "acc_stderr": 0.04093601807403326, "acc_norm": 0.79, "acc_norm_stderr": 0.04093601807403326 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5148936170212766, "acc_stderr": 0.03267151848924777, "acc_norm": 0.5148936170212766, "acc_norm_stderr": 0.03267151848924777 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4473684210526316, "acc_stderr": 0.04677473004491199, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.04677473004491199 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5448275862068965, "acc_stderr": 0.04149886942192117, "acc_norm": 0.5448275862068965, "acc_norm_stderr": 0.04149886942192117 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4021164021164021, "acc_stderr": 0.02525303255499769, "acc_norm": 0.4021164021164021, "acc_norm_stderr": 0.02525303255499769 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3412698412698413, "acc_stderr": 0.04240799327574924, "acc_norm": 0.3412698412698413, "acc_norm_stderr": 0.04240799327574924 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7225806451612903, "acc_stderr": 0.025470196835900055, "acc_norm": 0.7225806451612903, "acc_norm_stderr": 0.025470196835900055 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4876847290640394, "acc_stderr": 0.035169204442208966, "acc_norm": 0.4876847290640394, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7393939393939394, "acc_stderr": 0.034277431758165236, "acc_norm": 0.7393939393939394, "acc_norm_stderr": 0.034277431758165236 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7929292929292929, "acc_stderr": 0.02886977846026705, "acc_norm": 0.7929292929292929, "acc_norm_stderr": 0.02886977846026705 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8652849740932642, "acc_stderr": 0.02463978909770944, "acc_norm": 0.8652849740932642, "acc_norm_stderr": 0.02463978909770944 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6076923076923076, "acc_stderr": 0.024756000382130956, "acc_norm": 0.6076923076923076, "acc_norm_stderr": 0.024756000382130956 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.02874204090394849, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.02874204090394849 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6764705882352942, "acc_stderr": 0.030388353551886797, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.030388353551886797 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 0.03879687024073327, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.03879687024073327 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7981651376146789, "acc_stderr": 0.017208579357787572, "acc_norm": 0.7981651376146789, "acc_norm_stderr": 0.017208579357787572 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5046296296296297, "acc_stderr": 
0.03409825519163572, "acc_norm": 0.5046296296296297, "acc_norm_stderr": 0.03409825519163572 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7549019607843137, "acc_stderr": 0.03019028245350195, "acc_norm": 0.7549019607843137, "acc_norm_stderr": 0.03019028245350195 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7552742616033755, "acc_stderr": 0.027985699387036416, "acc_norm": 0.7552742616033755, "acc_norm_stderr": 0.027985699387036416 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.03114679648297246, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.03114679648297246 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7022900763358778, "acc_stderr": 0.040103589424622034, "acc_norm": 0.7022900763358778, "acc_norm_stderr": 0.040103589424622034 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.75, "acc_stderr": 0.04186091791394607, "acc_norm": 0.75, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7730061349693251, "acc_stderr": 0.032910995786157686, "acc_norm": 0.7730061349693251, "acc_norm_stderr": 0.032910995786157686 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5, "acc_stderr": 0.04745789978762494, "acc_norm": 0.5, "acc_norm_stderr": 0.04745789978762494 }, "harness|hendrycksTest-management|5": { "acc": 0.8155339805825242, "acc_stderr": 0.03840423627288276, "acc_norm": 0.8155339805825242, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507333, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.73, "acc_stderr": 0.0446196043338474, "acc_norm": 0.73, "acc_norm_stderr": 
0.0446196043338474 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7969348659003831, "acc_stderr": 0.014385525076611573, "acc_norm": 0.7969348659003831, "acc_norm_stderr": 0.014385525076611573 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.708092485549133, "acc_stderr": 0.024476994076247337, "acc_norm": 0.708092485549133, "acc_norm_stderr": 0.024476994076247337 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24134078212290502, "acc_stderr": 0.014310999547961459, "acc_norm": 0.24134078212290502, "acc_norm_stderr": 0.014310999547961459 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7058823529411765, "acc_stderr": 0.02609016250427905, "acc_norm": 0.7058823529411765, "acc_norm_stderr": 0.02609016250427905 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6913183279742765, "acc_stderr": 0.026236965881153262, "acc_norm": 0.6913183279742765, "acc_norm_stderr": 0.026236965881153262 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7438271604938271, "acc_stderr": 0.024288533637726095, "acc_norm": 0.7438271604938271, "acc_norm_stderr": 0.024288533637726095 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5035460992907801, "acc_stderr": 0.02982674915328092, "acc_norm": 0.5035460992907801, "acc_norm_stderr": 0.02982674915328092 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.44654498044328556, "acc_stderr": 0.01269704602439969, "acc_norm": 0.44654498044328556, "acc_norm_stderr": 0.01269704602439969 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6213235294117647, "acc_stderr": 0.02946513363977613, "acc_norm": 0.6213235294117647, "acc_norm_stderr": 0.02946513363977613 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6650326797385621, "acc_stderr": 0.019094228167000318, "acc_norm": 0.6650326797385621, "acc_norm_stderr": 0.019094228167000318 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, 
"acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7183673469387755, "acc_stderr": 0.028795185574291286, "acc_norm": 0.7183673469387755, "acc_norm_stderr": 0.028795185574291286 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.02587064676616913, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.02587064676616913 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.82, "acc_stderr": 0.038612291966536955, "acc_norm": 0.82, "acc_norm_stderr": 0.038612291966536955 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.038823108508905954, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.038823108508905954 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8128654970760234, "acc_stderr": 0.029913127232368036, "acc_norm": 0.8128654970760234, "acc_norm_stderr": 0.029913127232368036 }, "harness|truthfulqa:mc|0": { "mc1": 0.27539779681762544, "mc1_stderr": 0.01563813566777552, "mc2": 0.4232205129895114, "mc2_stderr": 0.014379155129720761 }, "harness|winogrande|5": { "acc": 0.7821625887924231, "acc_stderr": 0.011601066079939324 }, "harness|gsm8k|5": { "acc": 0.3411675511751327, "acc_stderr": 0.013059111935831496 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_dball__zephyr-7b-sft-qlora
[ "region:us" ]
2024-01-25T13:48:34+00:00
{"pretty_name": "Evaluation run of dball/zephyr-7b-sft-qlora", "dataset_summary": "Dataset automatically created during the evaluation run of model [dball/zephyr-7b-sft-qlora](https://huggingface.co/dball/zephyr-7b-sft-qlora) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_dball__zephyr-7b-sft-qlora\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T13:46:13.704337](https://huggingface.co/datasets/open-llm-leaderboard/details_dball__zephyr-7b-sft-qlora/blob/main/results_2024-01-25T13-46-13.704337.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.616234422874079,\n \"acc_stderr\": 0.032638077229522285,\n \"acc_norm\": 0.6220964683837684,\n \"acc_norm_stderr\": 0.03330602977644688,\n \"mc1\": 0.27539779681762544,\n \"mc1_stderr\": 0.01563813566777552,\n \"mc2\": 0.4232205129895114,\n \"mc2_stderr\": 0.014379155129720761\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5639931740614335,\n \"acc_stderr\": 0.014491225699230916,\n \"acc_norm\": 0.5972696245733788,\n \"acc_norm_stderr\": 0.014332236306790147\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6214897430790679,\n \"acc_stderr\": 0.004840244782805302,\n \"acc_norm\": 0.8249352718581956,\n \"acc_norm_stderr\": 0.0037924580005234405\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542129,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542129\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5777777777777777,\n \"acc_stderr\": 0.04266763404099582,\n \"acc_norm\": 0.5777777777777777,\n \"acc_norm_stderr\": 0.04266763404099582\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6644736842105263,\n \"acc_stderr\": 0.03842498559395268,\n \"acc_norm\": 0.6644736842105263,\n \"acc_norm_stderr\": 0.03842498559395268\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6716981132075471,\n \"acc_stderr\": 0.02890159361241178,\n \"acc_norm\": 0.6716981132075471,\n \"acc_norm_stderr\": 0.02890159361241178\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6805555555555556,\n \"acc_stderr\": 0.038990736873573344,\n \"acc_norm\": 0.6805555555555556,\n \"acc_norm_stderr\": 0.038990736873573344\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.41,\n 
\"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6011560693641619,\n \"acc_stderr\": 0.0373362665538351,\n \"acc_norm\": 0.6011560693641619,\n \"acc_norm_stderr\": 0.0373362665538351\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3431372549019608,\n \"acc_stderr\": 0.04724007352383887,\n \"acc_norm\": 0.3431372549019608,\n \"acc_norm_stderr\": 0.04724007352383887\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.04093601807403326,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.04093601807403326\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5148936170212766,\n \"acc_stderr\": 0.03267151848924777,\n \"acc_norm\": 0.5148936170212766,\n \"acc_norm_stderr\": 0.03267151848924777\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4473684210526316,\n \"acc_stderr\": 0.04677473004491199,\n \"acc_norm\": 0.4473684210526316,\n \"acc_norm_stderr\": 0.04677473004491199\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4021164021164021,\n \"acc_stderr\": 0.02525303255499769,\n \"acc_norm\": 0.4021164021164021,\n \"acc_norm_stderr\": 0.02525303255499769\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3412698412698413,\n \"acc_stderr\": 0.04240799327574924,\n 
\"acc_norm\": 0.3412698412698413,\n \"acc_norm_stderr\": 0.04240799327574924\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7225806451612903,\n \"acc_stderr\": 0.025470196835900055,\n \"acc_norm\": 0.7225806451612903,\n \"acc_norm_stderr\": 0.025470196835900055\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4876847290640394,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.4876847290640394,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7393939393939394,\n \"acc_stderr\": 0.034277431758165236,\n \"acc_norm\": 0.7393939393939394,\n \"acc_norm_stderr\": 0.034277431758165236\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7929292929292929,\n \"acc_stderr\": 0.02886977846026705,\n \"acc_norm\": 0.7929292929292929,\n \"acc_norm_stderr\": 0.02886977846026705\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8652849740932642,\n \"acc_stderr\": 0.02463978909770944,\n \"acc_norm\": 0.8652849740932642,\n \"acc_norm_stderr\": 0.02463978909770944\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6076923076923076,\n \"acc_stderr\": 0.024756000382130956,\n \"acc_norm\": 0.6076923076923076,\n \"acc_norm_stderr\": 0.024756000382130956\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.02874204090394849,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.02874204090394849\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": 
{\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.030388353551886797,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.030388353551886797\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.03879687024073327,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.03879687024073327\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7981651376146789,\n \"acc_stderr\": 0.017208579357787572,\n \"acc_norm\": 0.7981651376146789,\n \"acc_norm_stderr\": 0.017208579357787572\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5046296296296297,\n \"acc_stderr\": 0.03409825519163572,\n \"acc_norm\": 0.5046296296296297,\n \"acc_norm_stderr\": 0.03409825519163572\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.03019028245350195,\n \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.03019028245350195\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7552742616033755,\n \"acc_stderr\": 0.027985699387036416,\n \"acc_norm\": 0.7552742616033755,\n \"acc_norm_stderr\": 0.027985699387036416\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.03114679648297246,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.03114679648297246\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7022900763358778,\n \"acc_stderr\": 0.040103589424622034,\n \"acc_norm\": 0.7022900763358778,\n \"acc_norm_stderr\": 0.040103589424622034\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n 
\"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.032910995786157686,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.032910995786157686\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04745789978762494,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04745789978762494\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8155339805825242,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.8155339805825242,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7969348659003831,\n \"acc_stderr\": 0.014385525076611573,\n \"acc_norm\": 0.7969348659003831,\n \"acc_norm_stderr\": 0.014385525076611573\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.708092485549133,\n \"acc_stderr\": 0.024476994076247337,\n \"acc_norm\": 0.708092485549133,\n \"acc_norm_stderr\": 0.024476994076247337\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24134078212290502,\n \"acc_stderr\": 0.014310999547961459,\n \"acc_norm\": 0.24134078212290502,\n \"acc_norm_stderr\": 0.014310999547961459\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7058823529411765,\n \"acc_stderr\": 0.02609016250427905,\n \"acc_norm\": 0.7058823529411765,\n \"acc_norm_stderr\": 0.02609016250427905\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6913183279742765,\n \"acc_stderr\": 0.026236965881153262,\n \"acc_norm\": 0.6913183279742765,\n \"acc_norm_stderr\": 0.026236965881153262\n },\n \"harness|hendrycksTest-prehistory|5\": 
{\n \"acc\": 0.7438271604938271,\n \"acc_stderr\": 0.024288533637726095,\n \"acc_norm\": 0.7438271604938271,\n \"acc_norm_stderr\": 0.024288533637726095\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5035460992907801,\n \"acc_stderr\": 0.02982674915328092,\n \"acc_norm\": 0.5035460992907801,\n \"acc_norm_stderr\": 0.02982674915328092\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.44654498044328556,\n \"acc_stderr\": 0.01269704602439969,\n \"acc_norm\": 0.44654498044328556,\n \"acc_norm_stderr\": 0.01269704602439969\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6213235294117647,\n \"acc_stderr\": 0.02946513363977613,\n \"acc_norm\": 0.6213235294117647,\n \"acc_norm_stderr\": 0.02946513363977613\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6650326797385621,\n \"acc_stderr\": 0.019094228167000318,\n \"acc_norm\": 0.6650326797385621,\n \"acc_norm_stderr\": 0.019094228167000318\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7183673469387755,\n \"acc_stderr\": 0.028795185574291286,\n \"acc_norm\": 0.7183673469387755,\n \"acc_norm_stderr\": 0.028795185574291286\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.02587064676616913,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.02587064676616913\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.82,\n \"acc_stderr\": 0.038612291966536955,\n \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.038612291966536955\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.038823108508905954,\n \"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 0.038823108508905954\n },\n 
\"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8128654970760234,\n \"acc_stderr\": 0.029913127232368036,\n \"acc_norm\": 0.8128654970760234,\n \"acc_norm_stderr\": 0.029913127232368036\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.27539779681762544,\n \"mc1_stderr\": 0.01563813566777552,\n \"mc2\": 0.4232205129895114,\n \"mc2_stderr\": 0.014379155129720761\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7821625887924231,\n \"acc_stderr\": 0.011601066079939324\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3411675511751327,\n \"acc_stderr\": 0.013059111935831496\n }\n}\n```", "repo_url": "https://huggingface.co/dball/zephyr-7b-sft-qlora", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|arc:challenge|25_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|gsm8k|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hellaswag|10_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T13-46-13.704337.parquet", 
"**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T13-46-13.704337.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T13-46-13.704337.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T13-46-13.704337.parquet", 
"**/details_harness|hendrycksTest-college_physics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T13-46-13.704337.parquet", 
"**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T13-46-13.704337.parquet", 
"**/details_harness|hendrycksTest-sociology|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T13-46-13.704337.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T13-46-13.704337.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": 
["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T13-46-13.704337.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T13-46-13.704337.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["**/details_harness|winogrande|5_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T13-46-13.704337.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T13_46_13.704337", "path": ["results_2024-01-25T13-46-13.704337.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T13-46-13.704337.parquet"]}]}]}
2024-01-25T13:48:57+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of dball/zephyr-7b-sft-qlora Dataset automatically created during the evaluation run of model dball/zephyr-7b-sft-qlora on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T13:46:13.704337(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of dball/zephyr-7b-sft-qlora\n\n\n\nDataset automatically created during the evaluation run of model dball/zephyr-7b-sft-qlora on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T13:46:13.704337(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of dball/zephyr-7b-sft-qlora\n\n\n\nDataset automatically created during the evaluation run of model dball/zephyr-7b-sft-qlora on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T13:46:13.704337(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 187, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of dball/zephyr-7b-sft-qlora\n\n\n\nDataset automatically created during the evaluation run of model dball/zephyr-7b-sft-qlora on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T13:46:13.704337(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.058380916714668274, 0.2107771933078766, -0.005691645201295614, 0.04063160717487335, 0.09008432924747467, -0.014037074521183968, 0.02922976389527321, 0.0985957682132721, 0.015762802213430405, 0.17667490243911743, -0.020358629524707794, 0.11079032719135284, 0.0700487494468689, 0.12767699360847473, 0.023096943274140358, -0.1355646550655365, 0.018464859575033188, -0.08767051249742508, 0.06803623586893082, 0.08352459967136383, 0.06787455826997757, -0.08269073069095612, 0.0650215744972229, -0.025362897664308548, 0.03067447617650032, -0.019733276218175888, -0.08441789448261261, -0.029520481824874878, 0.096109539270401, 0.11715641617774963, 0.03912319615483284, -0.017052944749593735, 0.022535622119903564, -0.272175669670105, 0.017133712768554688, 0.09188464283943176, -0.0024705471005290747, 0.029485922306776047, 0.1340918391942978, -0.057622842490673065, 0.0877590924501419, -0.02850504219532013, 0.07544006407260895, 0.058340299874544144, -0.10645370185375214, -0.14519724249839783, -0.1429339200258255, -0.010830966755747795, 0.06761148571968079, 0.048738762736320496, -0.02520456537604332, 0.13764330744743347, -0.06749740242958069, 0.04693467170000076, 0.12069657444953918, -0.10093767195940018, -0.017766114324331284, 0.06541438400745392, 0.013915501534938812, 0.06379289925098419, -0.0887124314904213, -0.026645224541425705, 0.030390052124857903, 0.052791789174079895, 0.0021465877071022987, 0.012270389124751091, -0.0037330593913793564, 0.014871231280267239, -0.13579294085502625, -0.12590549886226654, 0.14516326785087585, 0.004765419289469719, -0.046294569969177246, -0.18417134881019592, -0.00929223746061325, 0.012947240844368935, 0.00463110813871026, -0.018302572891116142, -0.000058853765949606895, -0.025474313646554947, 0.0997077152132988, -0.00852882582694292, -0.10305725038051605, -0.029389172792434692, -0.015385618433356285, 0.08653721213340759, 0.01539896335452795, -0.014187535271048546, 0.001700814813375473, 0.12072455883026123, 0.005834546871483326, 
-0.07525545358657837, -0.08197622746229172, -0.06185538321733475, -0.13318970799446106, -0.04883986711502075, 0.0007970854640007019, -0.0844046100974083, 0.04414740204811096, 0.23295830190181732, -0.010128420777618885, 0.03312866389751434, -0.09321776032447815, 0.0037822641897946596, 0.11770401895046234, 0.06577570736408234, -0.07043501734733582, -0.05769684910774231, -0.03090541809797287, 0.030762601643800735, 0.029999710619449615, -0.01678106188774109, 0.005130234640091658, 0.052297480404376984, 0.03163888305425644, 0.11342623829841614, 0.11697407066822052, 0.0464896559715271, -0.06635089218616486, -0.026719648391008377, 0.23984327912330627, -0.14232683181762695, -0.02324330061674118, 0.02565043419599533, -0.05305396765470505, -0.12392912805080414, 0.06317241489887238, -0.005398553796112537, -0.043341949582099915, 0.13649266958236694, -0.04383997991681099, -0.0762573629617691, -0.07414329051971436, -0.05857658013701439, 0.0555063858628273, 0.033937256783246994, -0.03859931230545044, -0.07773284614086151, -0.08788080513477325, -0.08735167235136032, 0.026582250371575356, -0.057212695479393005, -0.03039875626564026, 0.01061655767261982, -0.00557967321947217, -0.012609286233782768, -0.0059062885120511055, 0.10863596200942993, -0.05287951976060867, 0.04469936341047287, -0.0010753294918686152, 0.012516283430159092, 0.08242008090019226, 0.04563215374946594, -0.11716058850288391, 0.07093900442123413, -0.15367475152015686, 0.0996764525771141, -0.11808587610721588, -0.013848749920725822, -0.12197762727737427, -0.013037532567977905, -0.019197849556803703, 0.03631441667675972, -0.029115399345755577, 0.09317482262849808, -0.22614969313144684, 0.009563596919178963, 0.13561159372329712, -0.11365702748298645, -0.08156470954418182, 0.07978643476963043, -0.04973313957452774, 0.0538417249917984, 0.03644496202468872, 0.10857819020748138, 0.12234324216842651, -0.07421364635229111, -0.08442002534866333, -0.05709445849061012, -0.03090737760066986, 0.16154205799102783, 
0.06322399526834488, -0.08272890746593475, 0.11343851685523987, 0.05633719265460968, -0.021586760878562927, -0.07058605551719666, 0.008666925132274628, -0.0664936751127243, -0.004768840968608856, -0.07484148442745209, -0.03792567923665047, -0.006199568510055542, -0.07240230590105057, -0.0070975408889353275, -0.0802214965224266, -0.015762370079755783, 0.09714702516794205, -0.013273198157548904, 0.004810090642422438, -0.06871052086353302, 0.028801148757338524, 0.018550027161836624, 0.015545111149549484, -0.22304992377758026, -0.11942977458238602, 0.0281592458486557, -0.19116425514221191, 0.05187748000025749, 0.03973573073744774, 0.013796238228678703, 0.046643372625112534, -0.003436662722378969, 0.032561443746089935, 0.018639368936419487, -0.011014332994818687, -0.006658162921667099, -0.12990069389343262, -0.057880301028490067, -0.09266643971204758, 0.09117237478494644, -0.15039891004562378, -0.014297863468527794, 0.07317274808883667, 0.15134015679359436, 0.02412179484963417, -0.08700752258300781, 0.060930781066417694, 0.010393954813480377, -0.036526892334222794, -0.053099051117897034, -0.0011814744211733341, -0.02569347620010376, 0.0260881669819355, 0.013760724104940891, -0.1977437436580658, -0.09609346091747284, 0.06730754673480988, 0.13478823006153107, -0.07874851673841476, -0.11400090157985687, -0.05777205899357796, -0.06422396749258041, -0.0774143785238266, -0.06534364074468613, 0.08948488533496857, 0.09463166445493698, 0.04516118764877319, -0.06085205078125, -0.0534079372882843, 0.009153557941317558, 0.06447052955627441, -0.059162259101867676, 0.10165289044380188, 0.0781567245721817, -0.07910633087158203, 0.10334409028291702, -0.051060762256383896, 0.10718315094709396, 0.07058117538690567, 0.022772885859012604, -0.09130557626485825, 0.0098764318972826, 0.06815682351589203, 0.036303918808698654, 0.07578220963478088, -0.04279765114188194, 0.037893541157245636, 0.08243989944458008, -0.017800644040107727, 0.032743003219366074, -0.061011750251054764, 
0.026422854512929916, 0.0404093936085701, 0.005625074729323387, 0.012826431542634964, 0.020172448828816414, 0.016960857436060905, 0.07690183818340302, 0.02380414865911007, 0.08672679960727692, -0.010600687935948372, -0.051190994679927826, -0.09906637668609619, 0.14662864804267883, -0.08765514194965363, -0.28506171703338623, -0.16068099439144135, -0.032836150377988815, -0.0460117906332016, -0.013447768054902554, 0.0620688833296299, -0.005362591706216335, -0.09818483889102936, -0.09768325090408325, 0.046653393656015396, 0.027682222425937653, -0.13796257972717285, -0.06668023020029068, 0.056243814527988434, -0.0013294985983520746, -0.16825540363788605, 0.048161886632442474, 0.045762717723846436, -0.045942313969135284, -0.0030793463811278343, 0.08814119547605515, 0.11621282994747162, 0.08644257485866547, 0.07375381886959076, -0.02535932883620262, -0.013118485920131207, 0.17952686548233032, -0.11575314402580261, 0.03408696874976158, 0.098526731133461, -0.05193683132529259, 0.07053862512111664, 0.1804642528295517, 0.01578662544488907, -0.08572402596473694, 0.0569743737578392, 0.09094163775444031, -0.0656290054321289, -0.2395707666873932, -0.12135209143161774, -0.029496021568775177, -0.0004443596117198467, 0.10223072022199631, 0.06663373112678528, 0.01835259050130844, 0.01124553568661213, -0.11167025566101074, -0.028173372149467468, -0.047304339706897736, 0.0656934529542923, 0.08583001792430878, 0.004373909439891577, 0.04088144749403, -0.03611341491341591, 0.01896381750702858, 0.10669414699077606, 0.04390732944011688, 0.15189129114151, -0.052285268902778625, 0.16181568801403046, 0.07846640795469284, 0.07120433449745178, -0.03874681890010834, 0.03868047893047333, 0.003233279101550579, 0.06220964342355728, -0.017890017479658127, -0.10276155173778534, -0.06240501254796982, 0.10741432756185532, 0.037865716964006424, -0.07796464115381241, 0.028810866177082062, -0.0664777010679245, 0.031275127083063126, 0.17871108651161194, -0.03859095275402069, -0.12635639309883118, 
-0.05567789822816849, 0.044346168637275696, -0.03151734173297882, -0.09028185158967972, -0.0024647009558975697, 0.07256055623292923, -0.1481262445449829, 0.02251536026597023, -0.04170718789100647, 0.07254168391227722, -0.13130396604537964, -0.017718613147735596, -0.007947161793708801, 0.04032041132450104, 0.0003355792723596096, 0.12070155888795853, -0.1511155664920807, 0.08561024069786072, -0.004332534968852997, 0.01689114235341549, -0.10821551084518433, 0.054378218948841095, -0.031210947781801224, -0.05010434240102768, 0.13061514496803284, -0.009877596981823444, -0.08580876886844635, -0.05560515448451042, -0.09376867115497589, -0.006997149903327227, 0.06006144732236862, -0.09733343869447708, 0.10302358865737915, 0.03442646563053131, -0.018977608531713486, -0.03634607046842575, -0.007496154401451349, -0.10402879118919373, -0.24244917929172516, 0.11258058249950409, -0.1274898499250412, 0.057167764753103256, -0.06598284840583801, -0.043960947543382645, -0.0509805828332901, 0.1508471965789795, -0.07706563174724579, -0.06486921012401581, -0.10662022233009338, -0.012720771133899689, 0.1793050765991211, -0.053073983639478683, 0.06768261641263962, -0.03682275488972664, 0.17219296097755432, -0.018857795745134354, -0.040723416954278946, -0.0038478695787489414, -0.0832899734377861, -0.176527202129364, -0.047946296632289886, 0.10753099620342255, 0.07709383964538574, 0.011282311752438545, -0.0018006032332777977, 0.010763566941022873, 0.025562386959791183, -0.09971548616886139, 0.023737680166959763, 0.12058599293231964, 0.11101532727479935, 0.035154204815626144, -0.007631621323525906, -0.12216155230998993, -0.10494482517242432, -0.09381222724914551, 0.051004428416490555, 0.1700703203678131, -0.06725436449050903, 0.17032349109649658, 0.13684311509132385, -0.08092887699604034, -0.1824326515197754, -0.06928523629903793, 0.03336070477962494, -0.029036322608590126, 0.1281953901052475, -0.2097853720188141, 0.06527893990278244, 0.06755188852548599, -0.029915999621152878, 
0.13041548430919647, -0.2509755492210388, -0.13478589057922363, 0.04376722499728203, 0.036829058080911636, -0.2377292811870575, -0.1722918450832367, -0.11177755147218704, -0.019304517656564713, -0.17095234990119934, 0.12199674546718597, 0.043327078223228455, 0.027863990515470505, -0.02252751961350441, 0.09789376705884933, 0.06425631046295166, -0.0703815221786499, 0.13344267010688782, -0.00436860416084528, 0.02068464085459709, -0.10715348273515701, -0.04983361065387726, -0.015005577355623245, -0.04210143908858299, 0.07642696052789688, 0.026423227041959763, 0.05723242834210396, -0.09011317789554596, -0.03537199646234512, -0.06748004257678986, 0.0494585856795311, -0.0722922682762146, -0.05976030230522156, -0.07084894180297852, 0.08452200889587402, 0.08018110692501068, -0.009217726066708565, 0.016107330098748207, -0.04990196228027344, 0.04056210815906525, 0.21945995092391968, 0.11150655895471573, 0.05539353936910629, -0.11887083947658539, -0.0350598469376564, -0.011102689430117607, -0.0002681240439414978, -0.10257588326931, 0.040902212262153625, 0.09465406835079193, 0.05065072700381279, 0.07511244714260101, -0.023574616760015488, -0.1798567771911621, 0.004877162165939808, 0.07582972943782806, -0.08966754376888275, -0.19600404798984528, 0.025265075266361237, 0.1461225301027298, -0.15632332861423492, -0.061691030859947205, 0.07479517161846161, 0.02300727367401123, -0.03545794636011124, -0.0012558888411149383, 0.07227097451686859, 0.064543716609478, 0.1066993847489357, 0.015805890783667564, 0.055013686418533325, -0.06984080374240875, 0.08525103330612183, 0.13391980528831482, -0.11278991401195526, 0.004681731108576059, 0.032810814678668976, -0.06560319662094116, -0.06989233940839767, -0.01928463764488697, -0.014912545680999756, 0.016907824203372, -0.03337794542312622, 0.031046226620674133, -0.02224915288388729, 0.05453655868768692, 0.12252563238143921, 0.009245789609849453, 0.045395709574222565, 0.01362044457346201, -0.005072139203548431, -0.05490566045045853, 
0.10314030200242996, 0.03099573217332363, 0.049056075513362885, -0.04689826816320419, 0.026999909430742264, 0.0055152010172605515, -0.024837017059326172, 0.018106359988451004, -0.04718007892370224, -0.06436236947774887, -0.0019306649919599295, -0.16694562137126923, 0.05370462313294411, -0.08168760687112808, 0.012575377710163593, 0.00082089111674577, -0.017625339329242706, 0.008897464722394943, 0.008449211716651917, -0.07745477557182312, -0.048477061092853546, -0.04376188665628433, 0.12731251120567322, -0.19014355540275574, -0.00961703434586525, 0.08205488324165344, -0.07104235887527466, 0.07358653098344803, -0.006028392817825079, -0.022197015583515167, 0.0183540228754282, -0.09067955613136292, -0.005189036950469017, -0.02700893208384514, 0.0671265572309494, 0.008537070825695992, -0.1342766433954239, -0.017486218363046646, -0.0038145326543599367, -0.08326210081577301, -0.012229139916598797, 0.028111595660448074, -0.16586065292358398, 0.045463576912879944, 0.08576089143753052, -0.04148300364613533, -0.04872559756040573, 0.035590238869190216, 0.05735976994037628, -0.0026144585572183132, 0.10389724373817444, -0.0002352348528802395, 0.038572296500205994, -0.14222340285778046, -0.04785830155014992, 0.0006796956295147538, 0.01417570747435093, 0.04086671769618988, 0.0256104227155447, 0.027279920876026154, 0.005024063400924206, 0.21423517167568207, -0.016840528696775436, 0.039078548550605774, 0.01864277571439743, -0.00485930684953928, -0.01581188477575779, 0.02968025952577591, 0.02112693525850773, 0.002953716553747654, 0.022528454661369324, 0.02923372946679592, -0.024945788085460663, -0.06464533507823944, -0.024474119767546654, 0.06608739495277405, 0.13038158416748047, 0.14142641425132751, -0.03936956822872162, 0.05806051939725876, -0.16080144047737122, -0.05252368003129959, 0.02657466009259224, -0.035022735595703125, 0.04180719330906868, -0.08195023983716965, 0.05791737139225006, 0.0770266205072403, -0.09599195420742035, 0.14688417315483093, -0.06332710385322571, 
-0.02856074646115303, -0.025296498090028763, -0.16564060747623444, -0.03564520552754402, 0.024589955806732178, 0.006873424630612135, -0.08400288224220276, 0.11105285584926605, 0.12721385061740875, -0.015754301100969315, -0.01209025178104639, 0.09403645992279053, -0.05500316619873047, -0.061504244804382324, -0.02460946887731552, 0.0017859467770904303, 0.009940212592482567, 0.012894503772258759, 0.07629603147506714, 0.019291171804070473, 0.05523638427257538, 0.06791508197784424, 0.0920906811952591, 0.0298606026917696, 0.009272423572838306, -0.037458501756191254, -0.04645824432373047, 0.00020174169912934303, -0.012976757250726223, -0.0570015013217926, 0.21546834707260132, 0.05072440579533577, 0.01946881040930748, 0.0071942973881959915, 0.22083666920661926, -0.004773456137627363, -0.07299676537513733, -0.12582212686538696, 0.13986578583717346, 0.01089552417397499, 0.02769327536225319, 0.027259383350610733, -0.12578433752059937, 0.03609717637300491, 0.15073177218437195, 0.10489273071289062, 0.04117477685213089, 0.012902851216495037, 0.04005042091012001, 0.024555105715990067, -0.028677888214588165, 0.054617367684841156, 0.028231661766767502, 0.2312101423740387, -0.0543757900595665, 0.07299111783504486, -0.0103475796058774, -0.0013427268713712692, -0.014481984078884125, 0.10403436422348022, -0.032617323100566864, 0.016656244173645973, -0.06967911869287491, 0.09589062631130219, -0.07195630669593811, -0.26142674684524536, -0.02092519775032997, -0.061559878289699554, -0.13564133644104004, -0.013569780625402927, 0.02636895328760147, -0.016299018636345863, 0.04445277899503708, 0.030549069866538048, -0.024510756134986877, 0.19103364646434784, 0.0005666390061378479, -0.0803566724061966, -0.05769423395395279, 0.07130984961986542, -0.03434053063392639, 0.27294671535491943, -0.0017705729696899652, 0.0798439234495163, 0.090349480509758, -0.009186755865812302, -0.13295143842697144, 0.02677883952856064, 0.09643213450908661, -0.05916062369942665, 0.06698621809482574, 
0.16279134154319763, -0.015146982856094837, 0.14999839663505554, 0.03298293799161911, -0.028049476444721222, 0.07659061253070831, 0.08812028169631958, 0.03834807127714157, -0.10101087391376495, 0.08413605391979218, -0.08695614337921143, 0.13155734539031982, 0.10579520463943481, -0.01225749310106039, -0.0008183619938790798, -0.05276970937848091, 0.057321369647979736, -0.03070688620209694, 0.13692373037338257, -0.02459302730858326, -0.1495896279811859, 0.0415535569190979, 0.0009011237416416407, 0.051997870206832886, -0.22141584753990173, -0.05400189757347107, 0.10789447277784348, -0.04817373305559158, 0.029056206345558167, 0.08244055509567261, 0.03417661786079407, 0.01355416513979435, -0.062212370336055756, -0.08567801117897034, -0.0018181167542934418, 0.11967428028583527, -0.09838247299194336, -0.040894679725170135 ]
f7abbd3ac5b50fe069aa80fde84e1bc5c22e5066
The Othello dataset from https://github.com/likenneth/othello_world. It has been converted to run with https://github.com/alxndrTL/othello_mamba. Instructions to download it and use it can be found on the `othello_mamba` repo.
alexandretl/othello
[ "size_categories:10M<n<100M", "region:us" ]
2024-01-25T13:50:28+00:00
{"size_categories": ["10M<n<100M"]}
2024-02-02T20:39:04+00:00
[]
[]
TAGS #size_categories-10M<n<100M #region-us
The Othello dataset from URL It has been converted to run with URL Instructions to download it and use it can be found on the 'othello_mamba' repo.
[]
[ "TAGS\n#size_categories-10M<n<100M #region-us \n" ]
[ 18 ]
[ "passage: TAGS\n#size_categories-10M<n<100M #region-us \n" ]
[ -0.04859356954693794, -0.011498402804136276, -0.0052909282967448235, -0.010571276769042015, 0.1104101613163948, 0.03590274602174759, 0.03806858882308006, 0.057009872049093246, 0.19126497209072113, 0.04956918582320213, 0.1103055477142334, -0.007075469475239515, 0.010750941000878811, 0.13862942159175873, -0.07567078620195389, -0.3129783868789673, 0.053320690989494324, 0.028585471212863922, 0.031645771116018295, 0.07125598937273026, -0.019641347229480743, -0.14468199014663696, 0.012897824868559837, -0.16815616190433502, -0.0692874863743782, 0.06504209339618683, 0.006774745881557465, -0.06583667546510696, 0.030055247247219086, -0.1022121012210846, 0.11335394531488419, -0.03205808997154236, -0.050583984702825546, -0.2321237474679947, 0.0005972764920443296, -0.021563079208135605, -0.02199375443160534, 0.0480065681040287, 0.10948512703180313, -0.016285540536046028, -0.0774964988231659, -0.10802409797906876, -0.027825789526104927, 0.08628445118665695, -0.20242705941200256, -0.1497577577829361, -0.07848114520311356, -0.060099516063928604, 0.06797664612531662, 0.022209446877241135, -0.037476666271686554, 0.1375860869884491, -0.24488984048366547, 0.0017461180686950684, 0.274764746427536, -0.28388726711273193, 0.06634852290153503, 0.2902055084705353, 0.04378298297524452, 0.15666471421718597, -0.05573679134249687, 0.04177147522568703, 0.1256761997938156, -0.014539534226059914, -0.013402381911873817, -0.06764576584100723, 0.008275904692709446, 0.11334626376628876, -0.06465747952461243, -0.04790846258401871, 0.3400304615497589, 0.1189398467540741, 0.03172101452946663, -0.021322790533304214, -0.09818235039710999, -0.22023358941078186, -0.03117523342370987, 0.10112153738737106, 0.08554727584123611, 0.06533652544021606, 0.04638400301337242, 0.06040704622864723, -0.07834535092115402, -0.02024698071181774, -0.24130550026893616, 0.18554171919822693, -0.05585069954395294, 0.06997885555028915, -0.1635408103466034, -0.046611372381448746, -0.38366395235061646, -0.05776927247643471, 
0.05391162261366844, -0.0712517648935318, -0.10014324635267258, -0.015622851438820362, -0.07180650532245636, 0.03199901804327965, 0.09447348862886429, 0.11234354227781296, 0.014903428964316845, 0.05737554654479027, 0.06526386737823486, 0.09934644401073456, 0.08225972950458527, 0.13818755745887756, -0.08450501412153244, -0.1374034881591797, -0.04152122512459755, -0.1152670606970787, 0.06998379528522491, -0.043668217957019806, -0.12528076767921448, -0.1252516508102417, -0.011611687950789928, 0.10480021685361862, -0.0471419058740139, -0.020505886524915695, -0.07676184922456741, 0.04377615451812744, -0.028464026749134064, -0.0596364364027977, -0.000445540645159781, 0.00030477854306809604, -0.024972982704639435, 0.12187539786100388, -0.21864375472068787, -0.012897739186882973, 0.07652955502271652, 0.10616219788789749, -0.133106529712677, -0.00921093299984932, -0.048177383840084076, -0.10637685656547546, 0.09152518957853317, -0.15793132781982422, 0.012869328260421753, -0.10380101203918457, -0.09508312493562698, 0.03005353733897209, -0.034411631524562836, -0.04401500150561333, 0.09705505520105362, -0.019438665360212326, -0.019872436299920082, -0.0735015720129013, -0.04058307781815529, -0.03847075626254082, -0.06008264794945717, 0.05093175545334816, 0.03520676866173744, 0.15352000296115875, -0.17357535660266876, -0.0099912965670228, -0.08562330156564713, 0.12254012376070023, -0.11122986674308777, -0.03803587704896927, -0.006223828066140413, 0.23190264403820038, -0.0295928493142128, -0.01809731498360634, -0.20497526228427887, -0.03151527792215347, -0.028789032250642776, 0.13405543565750122, -0.22379480302333832, -0.0718005895614624, 0.17013266682624817, 0.034396953880786896, -0.05401691049337387, 0.02595621347427368, 0.04503115266561508, -0.06389358639717102, 0.09127671271562576, 0.4206089675426483, -0.08498214930295944, 0.06001456081867218, 0.009694304317235947, 0.16678380966186523, -0.1000245064496994, -0.24941080808639526, 0.10015576332807541, -0.1287703514099121, 
-0.03035864047706127, 0.021177096292376518, 0.17939500510692596, 0.027505172416567802, -0.05580532178282738, -0.008798112161457539, -0.005503222346305847, -0.07078585773706436, 0.17394211888313293, 0.03329011797904968, 0.10299425572156906, -0.11006896942853928, 0.10846240073442459, 0.01736653596162796, 0.017140459269285202, 0.08213955909013748, -0.011534511111676693, -0.004322797060012817, 0.13495337963104248, -0.19001223146915436, 0.019922366365790367, -0.19814997911453247, -0.020127980038523674, 0.05300457775592804, -0.011985852383077145, -0.0028078658506274223, 0.19792860746383667, 0.0798676460981369, -0.08230957388877869, -0.037089113146066666, 0.08592682331800461, 0.06422486901283264, 0.040846552699804306, -0.10958125442266464, 0.08667892217636108, 0.016494695097208023, -0.06144588813185692, -0.09526410698890686, -0.042780157178640366, 0.024769006296992302, 0.1727766990661621, -0.0252932608127594, 0.05239248275756836, 0.023239515721797943, 0.0373779758810997, 0.041569195687770844, 0.04909475892782211, -0.02736694924533367, 0.06254682689905167, -0.08206930011510849, -0.10599765926599503, -0.07252765446901321, -0.21651019155979156, 0.13288335502147675, 0.15340256690979004, -0.09315090626478195, 0.053927019238471985, -0.09999019652605057, -0.0068465289659798145, 0.05749833583831787, 0.04628147929906845, -0.07674094289541245, -0.10165863484144211, 0.018910251557826996, 0.09377870708703995, -0.05315176025032997, -0.06744053959846497, -0.010595896281301975, -0.05584064871072769, -0.046191658824682236, 0.1070079356431961, 0.09752681106328964, -0.10792281478643417, 0.16381913423538208, 0.36216533184051514, 0.07709381729364395, 0.10633287578821182, -0.07023990899324417, -0.06925614178180695, -0.009549898095428944, -0.02290244586765766, -0.04799393564462662, 0.08433199673891068, -0.15023475885391235, -0.021305926144123077, 0.06256488710641861, 0.08999865502119064, 0.10737411677837372, -0.09060919284820557, -0.08758017420768738, 0.002166218124330044, 
0.0030082545708864927, -0.15852756798267365, 0.055093299597501755, 0.018343212082982063, 0.06304646283388138, 0.09005945175886154, 0.017592716962099075, 0.051116302609443665, -0.05115946754813194, -0.056328848004341125, 0.14256952702999115, -0.12664450705051422, -0.1586531102657318, -0.08005000650882721, -0.07411443442106247, -0.008249836042523384, 0.07686350494623184, -0.00028502263012342155, -0.21237777173519135, -0.029857339337468147, -0.014267624355852604, 0.05928985774517059, -0.14889930188655853, 0.06086404621601105, 0.03517431393265724, 0.09202601760625839, -0.09142617881298065, -0.03672143444418907, -0.04454253986477852, -0.10679414868354797, 0.08945612609386444, 0.07896657288074493, -0.18501435220241547, 0.0841745063662529, 0.32017195224761963, -0.0016819502925500274, 0.09517873078584671, 0.0019987092819064856, 0.14053975045681, -0.05526537075638771, -0.016877181828022003, 0.11636240780353546, 0.08632953464984894, 0.009635006077587605, 0.24138975143432617, 0.05808067321777344, -0.11993381381034851, -0.040117453783750534, -0.004670651629567146, -0.15401458740234375, -0.16773848235607147, -0.0932287946343422, -0.16152943670749664, 0.10723067075014114, 0.08201809227466583, 0.03397871553897858, 0.048190370202064514, 0.06486070901155472, 0.10358753800392151, 0.041670508682727814, -0.06758391112089157, 0.028900565579533577, 0.11996055394411087, -0.024964772164821625, -0.010563211515545845, -0.089568592607975, -0.05185803398489952, 0.10246691107749939, 0.10005155205726624, 0.18732763826847076, 0.2339787632226944, 0.19309936463832855, -0.030484383925795555, -0.03502182289958, 0.12766389548778534, 0.19476671516895294, 0.097486712038517, -0.05150043964385986, -0.04052330181002617, -0.004940263461321592, 0.07689940184354782, -0.029843593016266823, 0.0990525558590889, -0.15244507789611816, -0.06845199316740036, -0.22951819002628326, 0.14412376284599304, -0.05192948505282402, 0.24580954015254974, -0.15821297466754913, 0.10054896026849747, 0.11525467038154602, 
-0.0025083543732762337, -0.019984543323516846, 0.0912286564707756, 0.09925778210163116, -0.07561688870191574, 0.06425443291664124, 0.004941500257700682, 0.10442396998405457, -0.029135363176465034, 0.11271541565656662, -0.07662417739629745, -0.08288370817899704, 0.008511281572282314, 0.09413645416498184, -0.06672403216362, 0.26885417103767395, 0.04236888512969017, -0.09398111701011658, -0.08571652323007584, -0.081174336373806, -0.032855283468961716, 0.062178924679756165, 0.03962791711091995, 0.043706074357032776, -0.026393385604023933, -0.19420669972896576, -0.06214291974902153, -0.05249480903148651, 0.17185066640377045, -0.05752955749630928, -0.01774783805012703, -0.007164927199482918, 0.015859583392739296, -0.004935773555189371, 0.0017726313089951873, 0.0245114266872406, -0.058239005506038666, -0.03685620054602623, 0.14256983995437622, -0.10948549956083298, 0.009469767101109028, 0.054605141282081604, -0.053712304681539536, 0.07967882603406906, -0.08914244920015335, -0.023617416620254517, -0.07384758442640305, -0.05266871303319931, 0.18318699300289154, -0.04427090659737587, 0.045512448996305466, -0.0595354400575161, -0.02197994850575924, -0.07641717791557312, -0.12427232414484024, 0.17200450599193573, -0.018035145476460457, 0.02772456593811512, -0.04988996684551239, 0.18103645741939545, -0.16345585882663727, 0.08205264061689377, -0.03261271491646767, 0.06128339841961861, -0.017550233751535416, -0.11990237236022949, 0.022372551262378693, -0.05716999992728233, 0.08307209610939026, 0.17878137528896332, 0.019614223390817642, 0.04167831689119339, 0.07178787887096405, 0.02937738411128521, 0.2318275421857834, 0.28142231702804565, -0.059830665588378906, 0.048783306032419205, 0.20467700064182281, 0.06995205581188202, -0.21415852010250092, 0.00450811255723238, -0.22182296216487885, -0.014920295216143131, 0.06586246937513351, -0.025899149477481842, 0.10973577946424484, 0.16559147834777832, -0.022435983642935753, 0.20758050680160522, -0.26459982991218567, 
-0.023442696779966354, 0.11116064339876175, -0.06092434749007225, 0.47514963150024414, -0.12229382246732712, -0.11985527724027634, -0.15511690080165863, -0.09873969107866287, 0.04827937111258507, -0.1019749715924263, 0.12225427478551865, -0.06809071451425552, 0.04093613848090172, 0.06419188529253006, -0.07300376892089844, 0.2188156247138977, 0.10307732224464417, 0.0953211858868599, -0.0807124674320221, -0.3007047474384308, 0.21569137275218964, -0.046607278287410736, 0.011634581722319126, -0.009479292668402195, -0.037397705018520355, -0.05083855614066124, -0.007563307415693998, -0.034636352211236954, -0.04965673387050629, 0.07244347035884857, -0.07385498285293579, -0.10763881355524063, -0.03694191947579384, -0.0768151730298996, -0.009629687294363976, 0.21388958394527435, -0.054391756653785706, 0.04770025238394737, 0.060335636138916016, 0.06379911303520203, -0.1472458839416504, -0.0015527538489550352, 0.019965337589383125, -0.027607126161456108, 0.07422949373722076, -0.19291557371616364, 0.03401266410946846, 0.16917717456817627, 0.008478731848299503, 0.043695706874132156, 0.0983097031712532, -0.039544351398944855, -0.02166585996747017, 0.18695873022079468, -0.06132092326879501, -0.0993010550737381, 0.016802402213215828, -0.12442512065172195, -0.055927824229002, 0.09810901433229446, -0.08962896466255188, 0.13434070348739624, 0.0279786866158247, -0.006709831766784191, -0.01004722435027361, -0.07663267105817795, 0.09262196719646454, 0.08832306414842606, 0.056242022663354874, -0.12998858094215393, 0.15869249403476715, 0.061284538358449936, 0.004472261294722557, -0.11497312039136887, 0.15112100541591644, -0.12380237877368927, -0.04202096536755562, -0.025119304656982422, -0.010455883108079433, 0.039878830313682556, -0.02505308948457241, 0.005514789838343859, -0.10499786585569382, 0.022658944129943848, 0.06442353874444962, 0.05697264522314072, 0.08233670890331268, 0.018906710669398308, -0.05596482381224632, 0.05032355710864067, -0.0822269544005394, -0.047962091863155365, 
0.059056974947452545, -0.1357947438955307, -0.06028076633810997, -0.07736501842737198, 0.08314452320337296, -0.05413467064499855, -0.03539562225341797, -0.162206768989563, 0.021985290572047234, -0.15963244438171387, -0.06402282416820526, -0.08829427510499954, -0.054436661303043365, -0.011982407420873642, 0.05118313059210777, -0.06154342740774155, -0.10190636664628983, -0.14559097588062286, 0.020884865894913673, -0.02978830225765705, 0.018496090546250343, 0.05041592940688133, -0.013251904398202896, 0.06581351906061172, -0.012737118639051914, 0.12856288254261017, 0.04744347557425499, 0.025735681876540184, 0.1135348379611969, -0.0719221979379654, -0.14160671830177307, 0.15932917594909668, 0.010538417845964432, 0.0814920961856842, 0.14271119236946106, -0.055801209062337875, -0.008320996537804604, 0.09494466334581375, 0.07777982205152512, -0.14887262880802155, -0.13966384530067444, -0.01635051518678665, -0.1412089318037033, -0.19372248649597168, -0.041752029210329056, -0.1400347650051117, 0.12578538060188293, 0.011768972501158714, 0.04835442453622818, 0.09004954993724823, 0.08656882494688034, -0.08597443997859955, -0.009469310753047466, -0.03694223612546921, -0.1201498955488205, 0.031354982405900955, 0.010283886454999447, 0.044586170464754105, 0.021765269339084625, 0.3774277865886688, -0.02008848823606968, -0.03607475757598877, -0.00265556201338768, 0.1731826364994049, 0.10226425528526306, 0.007595285773277283, 0.38343188166618347, 0.14646320044994354, -0.12208206206560135, -0.12339328974485397, 0.10394139587879181, 0.057017672806978226, -0.02141711115837097, 0.1636447310447693, 0.12842394411563873, -0.1538425087928772, 0.08712825179100037, -0.0372016467154026, -0.04643817991018295, 0.031021593138575554, 0.07979122549295425, 0.02261577919125557, -0.009004559367895126, 0.02048415131866932, 0.030136877670884132, 0.18046468496322632, -0.10097618401050568, 0.06577092409133911, -0.03777974471449852, -0.028059732168912888, -0.1372911036014557, -0.15750965476036072, 
-0.07400108873844147, -0.13572311401367188, 0.01744874194264412, -0.03483004868030548, -0.03652646392583847, 0.2559188902378082, -0.0019889601971954107, 0.0416918508708477, 0.12041782587766647, -0.1657327115535736, -0.0690145418047905, 0.0285998173058033, -0.019989661872386932, -0.01931578479707241, 0.043067652732133865, -0.05848295986652374, -0.01871664635837078, -0.15676173567771912, -0.023794298991560936, 0.03947046771645546, -0.08993411064147949, -0.06794606894254684, -0.13170398771762848, -0.07704756408929825, -0.05972277745604515, 0.04907584562897682, -0.05642552673816681, 0.1066683828830719, -0.015507855452597141, -0.06577619165182114, 0.004269362892955542, 0.1444566249847412, -0.08289683610200882, -0.05352126434445381, 0.01977171190083027, 0.08988111466169357, 0.04952021688222885, 0.13386689126491547, -0.11834190040826797, -0.1497201919555664, -0.051065027713775635, 0.21220944821834564, 0.25094670057296753, -0.017795942723751068, -0.010170848108828068, -0.007675099652260542, 0.02283734269440174, 0.1014014184474945, 0.09649848937988281, -0.010772242210805416, 0.19201679527759552, -0.0020964606665074825, 0.05174591392278671, 0.061207499355077744, -0.03598807752132416, -0.12930215895175934, 0.13057053089141846, 0.08459167927503586, -0.04127715528011322, -0.1565679907798767, 0.16536904871463776, -0.06474626064300537, 0.274646133184433, 0.12973757088184357, -0.19758519530296326, -0.05642712488770485, -0.018896417692303658, 0.0051837507635355, -0.0135745108127594, 0.15666010975837708, -0.114761121571064, -0.14211758971214294, -0.26482293009757996, 0.01913205347955227, -0.3690235912799835, -0.13671152293682098, 0.06959434598684311, 0.12325140833854675, 0.06480962038040161, -0.01405256986618042, 0.04601996764540672, 0.06520673632621765, -0.023029576987028122, -0.026662414893507957, 0.17183789610862732, 0.0322052501142025, 0.05702501907944679, -0.052713729441165924, 0.026597855612635612, -0.014280823059380054, -0.20937490463256836, 0.06485284119844437, 
-0.16542044281959534, -0.02242438495159149, 0.12452962249517441, -0.02251540869474411, -0.06783220916986465, -0.05055992305278778, -0.06458094716072083, 0.07228448241949081, 0.0039401729591190815, -0.0747590959072113, -0.007733492646366358, -0.01857782155275345, -0.03135622665286064, 0.07924371212720871, -0.18552932143211365, -0.14091534912586212, 0.026043593883514404, -0.04712681844830513, 0.15886655449867249, -0.05014101043343544, -0.11330235004425049, 0.050547048449516296, -0.08219730108976364, 0.09512671828269958, -0.08486583828926086, 0.02034114859998226, 0.12917321920394897, -0.019637661054730415, 0.007442891597747803, -0.2694902718067169, 0.03945545852184296, 0.09727673977613449, -0.14057821035385132, -0.09114525467157364 ]
07878ac681cc7f73cf95c05ee015bbd00cc838af
# Dataset Card for Evaluation run of CultriX/SevereNeuralBeagleTrix-7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [CultriX/SevereNeuralBeagleTrix-7B](https://huggingface.co/CultriX/SevereNeuralBeagleTrix-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_CultriX__SevereNeuralBeagleTrix-7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T13:49:38.033237](https://huggingface.co/datasets/open-llm-leaderboard/details_CultriX__SevereNeuralBeagleTrix-7B/blob/main/results_2024-01-25T13-49-38.033237.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6569238167645259, "acc_stderr": 0.032076224381540994, "acc_norm": 0.6561773763743689, "acc_norm_stderr": 0.032749625942617946, "mc1": 0.5556915544675642, "mc1_stderr": 0.01739458625074318, "mc2": 0.6902166608949867, "mc2_stderr": 0.014973712558809735 }, "harness|arc:challenge|25": { "acc": 0.7039249146757679, "acc_stderr": 0.013340916085246256, "acc_norm": 0.7278156996587031, "acc_norm_stderr": 0.013006600406423704 }, "harness|hellaswag|10": { "acc": 0.7104162517426807, "acc_stderr": 0.0045264221258606695, "acc_norm": 0.8832901812387971, "acc_norm_stderr": 0.0032041800729423805 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6592592592592592, "acc_stderr": 0.04094376269996792, "acc_norm": 0.6592592592592592, "acc_norm_stderr": 0.04094376269996792 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.67, "acc_stderr": 0.047258156262526094, "acc_norm": 0.67, "acc_norm_stderr": 0.047258156262526094 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7056603773584905, "acc_stderr": 0.028049186315695255, "acc_norm": 0.7056603773584905, "acc_norm_stderr": 0.028049186315695255 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7847222222222222, "acc_stderr": 0.03437079344106135, "acc_norm": 0.7847222222222222, "acc_norm_stderr": 0.03437079344106135 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, 
"acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6820809248554913, "acc_stderr": 0.0355068398916558, "acc_norm": 0.6820809248554913, "acc_norm_stderr": 0.0355068398916558 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.04897104952726366, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.04897104952726366 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816506, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5872340425531914, "acc_stderr": 0.03218471141400351, "acc_norm": 0.5872340425531914, "acc_norm_stderr": 0.03218471141400351 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5862068965517241, "acc_stderr": 0.04104269211806232, "acc_norm": 0.5862068965517241, "acc_norm_stderr": 0.04104269211806232 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41798941798941797, "acc_stderr": 0.02540255550326091, "acc_norm": 0.41798941798941797, "acc_norm_stderr": 0.02540255550326091 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.47619047619047616, "acc_stderr": 0.04467062628403273, "acc_norm": 0.47619047619047616, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7903225806451613, "acc_stderr": 0.023157879349083525, "acc_norm": 0.7903225806451613, "acc_norm_stderr": 0.023157879349083525 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4975369458128079, "acc_stderr": 0.03517945038691063, "acc_norm": 0.4975369458128079, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.797979797979798, "acc_stderr": 0.02860620428922987, "acc_norm": 0.797979797979798, "acc_norm_stderr": 0.02860620428922987 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.021500249576033456, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.021500249576033456 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6692307692307692, "acc_stderr": 0.02385479568097112, "acc_norm": 0.6692307692307692, "acc_norm_stderr": 0.02385479568097112 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34444444444444444, "acc_stderr": 0.02897264888484427, "acc_norm": 0.34444444444444444, "acc_norm_stderr": 0.02897264888484427 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6680672268907563, "acc_stderr": 0.03058869701378364, "acc_norm": 0.6680672268907563, "acc_norm_stderr": 0.03058869701378364 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3576158940397351, "acc_stderr": 0.03913453431177258, "acc_norm": 0.3576158940397351, "acc_norm_stderr": 0.03913453431177258 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8440366972477065, "acc_stderr": 0.01555580271359017, "acc_norm": 0.8440366972477065, "acc_norm_stderr": 0.01555580271359017 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5231481481481481, "acc_stderr": 0.03406315360711507, 
"acc_norm": 0.5231481481481481, "acc_norm_stderr": 0.03406315360711507 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8431372549019608, "acc_stderr": 0.025524722324553353, "acc_norm": 0.8431372549019608, "acc_norm_stderr": 0.025524722324553353 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8016877637130801, "acc_stderr": 0.02595502084162113, "acc_norm": 0.8016877637130801, "acc_norm_stderr": 0.02595502084162113 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.031146796482972465, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7786259541984732, "acc_stderr": 0.0364129708131373, "acc_norm": 0.7786259541984732, "acc_norm_stderr": 0.0364129708131373 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.0401910747255735, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.0401910747255735 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7300613496932515, "acc_stderr": 0.03487825168497892, "acc_norm": 0.7300613496932515, "acc_norm_stderr": 0.03487825168497892 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5, "acc_stderr": 0.04745789978762494, "acc_norm": 0.5, "acc_norm_stderr": 0.04745789978762494 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406964, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406964 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 
0.044619604333847394 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8352490421455939, "acc_stderr": 0.013265346261323792, "acc_norm": 0.8352490421455939, "acc_norm_stderr": 0.013265346261323792 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7398843930635838, "acc_stderr": 0.023618678310069356, "acc_norm": 0.7398843930635838, "acc_norm_stderr": 0.023618678310069356 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.43910614525139663, "acc_stderr": 0.01659802212058043, "acc_norm": 0.43910614525139663, "acc_norm_stderr": 0.01659802212058043 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7026143790849673, "acc_stderr": 0.02617390850671858, "acc_norm": 0.7026143790849673, "acc_norm_stderr": 0.02617390850671858 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7202572347266881, "acc_stderr": 0.02549425935069491, "acc_norm": 0.7202572347266881, "acc_norm_stderr": 0.02549425935069491 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7407407407407407, "acc_stderr": 0.024383665531035454, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.024383665531035454 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.48226950354609927, "acc_stderr": 0.02980873964223777, "acc_norm": 0.48226950354609927, "acc_norm_stderr": 0.02980873964223777 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.47196870925684486, "acc_stderr": 0.012750151802922438, "acc_norm": 0.47196870925684486, "acc_norm_stderr": 0.012750151802922438 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6691176470588235, "acc_stderr": 0.02858270975389845, "acc_norm": 0.6691176470588235, "acc_norm_stderr": 0.02858270975389845 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6797385620915033, "acc_stderr": 0.018875682938069443, "acc_norm": 0.6797385620915033, "acc_norm_stderr": 0.018875682938069443 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, 
"acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7346938775510204, "acc_stderr": 0.0282638899437846, "acc_norm": 0.7346938775510204, "acc_norm_stderr": 0.0282638899437846 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8308457711442786, "acc_stderr": 0.02650859065623327, "acc_norm": 0.8308457711442786, "acc_norm_stderr": 0.02650859065623327 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-virology|5": { "acc": 0.5602409638554217, "acc_stderr": 0.03864139923699121, "acc_norm": 0.5602409638554217, "acc_norm_stderr": 0.03864139923699121 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.029170885500727665, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.029170885500727665 }, "harness|truthfulqa:mc|0": { "mc1": 0.5556915544675642, "mc1_stderr": 0.01739458625074318, "mc2": 0.6902166608949867, "mc2_stderr": 0.014973712558809735 }, "harness|winogrande|5": { "acc": 0.8382004735595896, "acc_stderr": 0.010350128010292406 }, "harness|gsm8k|5": { "acc": 0.7164518574677786, "acc_stderr": 0.012415070917508125 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_CultriX__SevereNeuralBeagleTrix-7B
[ "region:us" ]
2024-01-25T13:51:59+00:00
{"pretty_name": "Evaluation run of CultriX/SevereNeuralBeagleTrix-7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [CultriX/SevereNeuralBeagleTrix-7B](https://huggingface.co/CultriX/SevereNeuralBeagleTrix-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_CultriX__SevereNeuralBeagleTrix-7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T13:49:38.033237](https://huggingface.co/datasets/open-llm-leaderboard/details_CultriX__SevereNeuralBeagleTrix-7B/blob/main/results_2024-01-25T13-49-38.033237.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6569238167645259,\n \"acc_stderr\": 0.032076224381540994,\n \"acc_norm\": 0.6561773763743689,\n \"acc_norm_stderr\": 0.032749625942617946,\n \"mc1\": 0.5556915544675642,\n \"mc1_stderr\": 0.01739458625074318,\n \"mc2\": 0.6902166608949867,\n \"mc2_stderr\": 0.014973712558809735\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7039249146757679,\n \"acc_stderr\": 0.013340916085246256,\n \"acc_norm\": 0.7278156996587031,\n \"acc_norm_stderr\": 0.013006600406423704\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7104162517426807,\n \"acc_stderr\": 0.0045264221258606695,\n \"acc_norm\": 0.8832901812387971,\n \"acc_norm_stderr\": 0.0032041800729423805\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6592592592592592,\n \"acc_stderr\": 0.04094376269996792,\n \"acc_norm\": 0.6592592592592592,\n \"acc_norm_stderr\": 0.04094376269996792\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.047258156262526094,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.047258156262526094\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7056603773584905,\n \"acc_stderr\": 0.028049186315695255,\n \"acc_norm\": 0.7056603773584905,\n \"acc_norm_stderr\": 0.028049186315695255\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7847222222222222,\n \"acc_stderr\": 0.03437079344106135,\n \"acc_norm\": 0.7847222222222222,\n \"acc_norm_stderr\": 0.03437079344106135\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 
0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6820809248554913,\n \"acc_stderr\": 0.0355068398916558,\n \"acc_norm\": 0.6820809248554913,\n \"acc_norm_stderr\": 0.0355068398916558\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.04897104952726366,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.04897104952726366\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5872340425531914,\n \"acc_stderr\": 0.03218471141400351,\n \"acc_norm\": 0.5872340425531914,\n \"acc_norm_stderr\": 0.03218471141400351\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5862068965517241,\n \"acc_stderr\": 0.04104269211806232,\n \"acc_norm\": 0.5862068965517241,\n \"acc_norm_stderr\": 0.04104269211806232\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41798941798941797,\n \"acc_stderr\": 0.02540255550326091,\n \"acc_norm\": 0.41798941798941797,\n \"acc_norm_stderr\": 0.02540255550326091\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.47619047619047616,\n \"acc_stderr\": 
0.04467062628403273,\n \"acc_norm\": 0.47619047619047616,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7903225806451613,\n \"acc_stderr\": 0.023157879349083525,\n \"acc_norm\": 0.7903225806451613,\n \"acc_norm_stderr\": 0.023157879349083525\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4975369458128079,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.797979797979798,\n \"acc_stderr\": 0.02860620428922987,\n \"acc_norm\": 0.797979797979798,\n \"acc_norm_stderr\": 0.02860620428922987\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033456,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.021500249576033456\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6692307692307692,\n \"acc_stderr\": 0.02385479568097112,\n \"acc_norm\": 0.6692307692307692,\n \"acc_norm_stderr\": 0.02385479568097112\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34444444444444444,\n \"acc_stderr\": 0.02897264888484427,\n \"acc_norm\": 0.34444444444444444,\n \"acc_norm_stderr\": 0.02897264888484427\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6680672268907563,\n \"acc_stderr\": 0.03058869701378364,\n \"acc_norm\": 0.6680672268907563,\n \"acc_norm_stderr\": 0.03058869701378364\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8440366972477065,\n \"acc_stderr\": 0.01555580271359017,\n \"acc_norm\": 0.8440366972477065,\n \"acc_norm_stderr\": 0.01555580271359017\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5231481481481481,\n \"acc_stderr\": 0.03406315360711507,\n \"acc_norm\": 0.5231481481481481,\n \"acc_norm_stderr\": 0.03406315360711507\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8431372549019608,\n \"acc_stderr\": 0.025524722324553353,\n \"acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.025524722324553353\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8016877637130801,\n \"acc_stderr\": 0.02595502084162113,\n \"acc_norm\": 0.8016877637130801,\n \"acc_norm_stderr\": 0.02595502084162113\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.0364129708131373,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.0364129708131373\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.0401910747255735,\n 
\"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.0401910747255735\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7300613496932515,\n \"acc_stderr\": 0.03487825168497892,\n \"acc_norm\": 0.7300613496932515,\n \"acc_norm_stderr\": 0.03487825168497892\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04745789978762494,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04745789978762494\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406964,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406964\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8352490421455939,\n \"acc_stderr\": 0.013265346261323792,\n \"acc_norm\": 0.8352490421455939,\n \"acc_norm_stderr\": 0.013265346261323792\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.023618678310069356,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.023618678310069356\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.43910614525139663,\n \"acc_stderr\": 0.01659802212058043,\n \"acc_norm\": 0.43910614525139663,\n \"acc_norm_stderr\": 0.01659802212058043\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7026143790849673,\n \"acc_stderr\": 0.02617390850671858,\n \"acc_norm\": 0.7026143790849673,\n \"acc_norm_stderr\": 0.02617390850671858\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7202572347266881,\n \"acc_stderr\": 0.02549425935069491,\n \"acc_norm\": 0.7202572347266881,\n 
\"acc_norm_stderr\": 0.02549425935069491\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.024383665531035454,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.024383665531035454\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48226950354609927,\n \"acc_stderr\": 0.02980873964223777,\n \"acc_norm\": 0.48226950354609927,\n \"acc_norm_stderr\": 0.02980873964223777\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47196870925684486,\n \"acc_stderr\": 0.012750151802922438,\n \"acc_norm\": 0.47196870925684486,\n \"acc_norm_stderr\": 0.012750151802922438\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6691176470588235,\n \"acc_stderr\": 0.02858270975389845,\n \"acc_norm\": 0.6691176470588235,\n \"acc_norm_stderr\": 0.02858270975389845\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6797385620915033,\n \"acc_stderr\": 0.018875682938069443,\n \"acc_norm\": 0.6797385620915033,\n \"acc_norm_stderr\": 0.018875682938069443\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7346938775510204,\n \"acc_stderr\": 0.0282638899437846,\n \"acc_norm\": 0.7346938775510204,\n \"acc_norm_stderr\": 0.0282638899437846\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8308457711442786,\n \"acc_stderr\": 0.02650859065623327,\n \"acc_norm\": 0.8308457711442786,\n \"acc_norm_stderr\": 0.02650859065623327\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n \"acc_stderr\": 0.03864139923699121,\n \"acc_norm\": 
0.5602409638554217,\n \"acc_norm_stderr\": 0.03864139923699121\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5556915544675642,\n \"mc1_stderr\": 0.01739458625074318,\n \"mc2\": 0.6902166608949867,\n \"mc2_stderr\": 0.014973712558809735\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8382004735595896,\n \"acc_stderr\": 0.010350128010292406\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7164518574677786,\n \"acc_stderr\": 0.012415070917508125\n }\n}\n```", "repo_url": "https://huggingface.co/CultriX/SevereNeuralBeagleTrix-7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|arc:challenge|25_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|gsm8k|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hellaswag|10_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T13-49-38.033237.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T13-49-38.033237.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T13-49-38.033237.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T13-49-38.033237.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T13-49-38.033237.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T13-49-38.033237.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T13-49-38.033237.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T13-49-38.033237.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["**/details_harness|winogrande|5_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T13-49-38.033237.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T13_49_38.033237", "path": ["results_2024-01-25T13-49-38.033237.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T13-49-38.033237.parquet"]}]}]}
2024-01-25T13:52:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of CultriX/SevereNeuralBeagleTrix-7B Dataset automatically created during the evaluation run of model CultriX/SevereNeuralBeagleTrix-7B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T13:49:38.033237 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of CultriX/SevereNeuralBeagleTrix-7B\n\n\n\nDataset automatically created during the evaluation run of model CultriX/SevereNeuralBeagleTrix-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T13:49:38.033237(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of CultriX/SevereNeuralBeagleTrix-7B\n\n\n\nDataset automatically created during the evaluation run of model CultriX/SevereNeuralBeagleTrix-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T13:49:38.033237(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 191, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of CultriX/SevereNeuralBeagleTrix-7B\n\n\n\nDataset automatically created during the evaluation run of model CultriX/SevereNeuralBeagleTrix-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T13:49:38.033237(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
[ -0.040670305490493774, 0.21763958036899567, -0.004346591420471668, 0.03668500855565071, 0.08412831276655197, -0.00943531934171915, 0.023104945197701454, 0.10810402035713196, -0.017762498930096626, 0.1837574690580368, -0.019314991310238838, 0.09711665660142899, 0.07259295880794525, 0.11796402186155319, 0.021483877673745155, -0.13454894721508026, 0.020004410296678543, -0.08123315125703812, 0.0804276242852211, 0.07579579204320908, 0.08437473326921463, -0.08245668560266495, 0.05890786275267601, -0.04397432506084442, -0.002906195819377899, -0.011015426367521286, -0.09178780764341354, -0.03711289167404175, 0.09385480731725693, 0.09797793626785278, 0.041372448205947876, -0.010459788143634796, 0.01999925635755062, -0.25973236560821533, 0.014470534399151802, 0.0864625945687294, -0.009115605615079403, 0.03834918141365051, 0.11145803332328796, -0.08291666954755783, 0.04222475364804268, -0.0676579475402832, 0.07185988873243332, 0.04461725428700447, -0.12273331731557846, -0.10833228379487991, -0.14956793189048767, 0.025151629000902176, 0.058464426547288895, 0.03909892588853836, -0.01985764689743519, 0.14827069640159607, -0.03245168179273605, 0.0482596792280674, 0.1338767111301422, -0.1063956543803215, -0.02783839963376522, 0.0565425306558609, 0.03234756737947464, 0.0719093307852745, -0.09249436855316162, -0.005407014861702919, 0.03765297681093216, 0.04628829285502434, 0.019475415349006653, 0.007961805909872055, -0.038616277277469635, 0.0156598761677742, -0.14713358879089355, -0.11990439891815186, 0.1605842411518097, 0.015266885980963707, -0.03737359866499901, -0.18019790947437286, -0.02198280580341816, 0.006381265819072723, -0.0017707174411043525, -0.044678810983896255, 0.007625315338373184, -0.02484477497637272, 0.08594688773155212, -0.025424430146813393, -0.09585647284984589, -0.017040204256772995, 0.00808875635266304, 0.04711855947971344, 0.020010745152831078, -0.02074587158858776, -0.0006787604070268571, 0.10561060905456543, -0.013427960686385632, -0.08032874763011932, 
-0.0697341114282608, -0.054471954703330994, -0.09929897636175156, -0.05056669935584068, 0.012363738380372524, -0.06247424706816673, 0.0366031639277935, 0.24218785762786865, -0.017989257350564003, 0.02137739211320877, -0.10389905422925949, 0.011668775230646133, 0.12369827926158905, 0.05303346738219261, -0.0698506087064743, -0.06873118877410889, -0.019114984199404716, 0.027006957679986954, 0.03623776137828827, -0.01953844353556633, 0.014405658468604088, 0.06491804122924805, 0.046197813004255295, 0.11408238112926483, 0.13263948261737823, 0.02681308053433895, -0.06666645407676697, -0.023576006293296814, 0.2340996116399765, -0.1430431604385376, -0.012753474526107311, 0.0009738305234350264, -0.03174701705574989, -0.10719121992588043, 0.09032206982374191, 0.009117224253714085, -0.041346948593854904, 0.11512923240661621, -0.04923155903816223, -0.0820542573928833, -0.0664016604423523, -0.05214357748627663, 0.0622251033782959, -0.014801418408751488, -0.03613866865634918, -0.0850907489657402, -0.07002592086791992, -0.08382359147071838, 0.019376110285520554, -0.06483538448810577, -0.03213952109217644, 0.02498987689614296, -0.013544520363211632, -0.01516281720250845, -0.012063568457961082, 0.08125267177820206, -0.05059456452727318, 0.030669987201690674, 0.009594636969268322, 0.014706777408719063, 0.06797429919242859, 0.04402172565460205, -0.11811628192663193, 0.08233033865690231, -0.1307186484336853, 0.08997292071580887, -0.11399461328983307, -0.006379983853548765, -0.14024943113327026, -0.012874937616288662, -0.02299729362130165, 0.014429638162255287, -0.003910125233232975, 0.09892017394304276, -0.21699315309524536, 0.016959398984909058, 0.13957378268241882, -0.10453303158283234, -0.1118980273604393, 0.07788271456956863, -0.046463437378406525, 0.07931887358427048, 0.05406508967280388, 0.1052759438753128, 0.12171511352062225, -0.07442332059144974, -0.1224178671836853, -0.0816969946026802, -0.027458064258098602, 0.15222860872745514, 0.06785092502832413, -0.07501213997602463, 
0.12870390713214874, 0.03869280964136124, -0.011813001707196236, -0.09861891716718674, -0.0122919250279665, -0.06447261571884155, -0.017988361418247223, -0.06048322841525078, -0.06003664806485176, -0.008566088043153286, -0.08527963608503342, -0.01346938218921423, -0.08125266432762146, 0.02846609242260456, 0.0835450291633606, -0.02612915262579918, 0.01979176513850689, -0.06293222308158875, 0.05168373882770538, 0.010930838994681835, 0.01871938817203045, -0.21044789254665375, -0.11517858505249023, 0.03356520086526871, -0.11988221853971481, 0.05962863191962242, 0.031331926584243774, 0.01165146566927433, 0.04053535312414169, -0.012254932895302773, 0.02623489312827587, 0.013585558161139488, -0.00009979190508602187, -0.018686441704630852, -0.1403888463973999, -0.05877558887004852, -0.08019047230482101, 0.1041489988565445, -0.13129261136054993, -0.015496227890253067, 0.07780534029006958, 0.16626594960689545, 0.005146561656147242, -0.08184365928173065, 0.07160229980945587, 0.001753261312842369, -0.037221912294626236, -0.054331280291080475, 0.008921345695853233, -0.021079037338495255, 0.03720764070749283, 0.0285306628793478, -0.19720233976840973, -0.15638479590415955, 0.07311706990003586, 0.12362020462751389, -0.07385527342557907, -0.09258727729320526, -0.06859109550714493, -0.06466154009103775, -0.07428162544965744, -0.07714598625898361, 0.0703597441315651, 0.07416202127933502, 0.034917980432510376, -0.06894170492887497, -0.06864907592535019, 0.011601907201111317, 0.047449059784412384, -0.07636410742998123, 0.09480217099189758, 0.06675814837217331, -0.08946232497692108, 0.09846141934394836, -0.01692698523402214, 0.1255475878715515, 0.0528574176132679, 0.034291576594114304, -0.09107818454504013, -0.001146454829722643, 0.05322689563035965, 0.04549543187022209, 0.06550798565149307, -0.03659592568874359, 0.0296154897660017, 0.07541509717702866, -0.006956302560865879, 0.040325336158275604, -0.05182400718331337, 0.033637192100286484, 0.04368007183074951, 0.008375663310289383, 
0.0337042361497879, 0.010183530859649181, -0.00401021633297205, 0.06459552049636841, 0.037668224424123764, 0.09581167995929718, -0.0172893013805151, -0.04467947781085968, -0.09731505066156387, 0.12498603761196136, -0.08640271425247192, -0.2718892991542816, -0.15933826565742493, -0.04673224687576294, -0.040496136993169785, -0.014016633853316307, 0.06554629653692245, -0.006446045357733965, -0.09074131399393082, -0.10171248763799667, 0.030215397477149963, 0.009564542211592197, -0.11941596120595932, -0.04469931498169899, 0.05892195552587509, 0.006152908317744732, -0.1652248501777649, 0.04100121185183525, 0.048397380858659744, -0.05127715691924095, -0.00191402155905962, 0.08675790578126907, 0.14429467916488647, 0.0763162225484848, 0.0563228465616703, -0.0315299928188324, -0.011636474169790745, 0.2025286853313446, -0.10098323225975037, 0.035340722650289536, 0.11796455830335617, -0.058468375355005264, 0.07250722497701645, 0.17476314306259155, 0.010050414130091667, -0.10126832127571106, 0.05294828116893768, 0.09089633822441101, -0.06621165573596954, -0.2423810362815857, -0.10602545738220215, -0.015706945210695267, -0.00003618446135078557, 0.1055552288889885, 0.05716564133763313, 0.02446110174059868, 0.016780123114585876, -0.11363230645656586, -0.01272101141512394, -0.050797928124666214, 0.08371865004301071, 0.06089850887656212, -0.010099831037223339, 0.04710553586483002, -0.0373704619705677, 0.024678237736225128, 0.11339165270328522, 0.03670966997742653, 0.1375073343515396, -0.02961164526641369, 0.17486229538917542, 0.09183990210294724, 0.10011272132396698, -0.04554184526205063, 0.03734495863318443, -0.0033602796029299498, 0.06953831017017365, -0.012087875977158546, -0.10303348302841187, -0.05568214878439903, 0.09299099445343018, 0.018381422385573387, -0.07148313522338867, 0.020182901993393898, -0.054481446743011475, 0.04058822989463806, 0.18743479251861572, -0.022499525919556618, -0.14442859590053558, -0.059014491736888885, 0.05824627727270126, -0.025303170084953308, 
-0.07997777312994003, -0.02994079887866974, 0.06549569964408875, -0.14459098875522614, 0.024838237091898918, -0.021251685917377472, 0.08064775913953781, -0.13130906224250793, -0.01785871759057045, -0.025728793814778328, 0.030086364597082138, 0.003951613325625658, 0.1137540340423584, -0.11386748403310776, 0.11285658925771713, 0.006058480124920607, 0.010935532860457897, -0.10184257477521896, 0.04130226746201515, -0.057000406086444855, -0.0378207191824913, 0.14205406606197357, -0.017261609435081482, -0.07011072337627411, -0.05146528035402298, -0.12011966854333878, -0.00962403230369091, 0.08129506558179855, -0.13530372083187103, 0.11346054077148438, 0.025459298864006996, -0.02357567474246025, -0.02481076493859291, -0.012971121817827225, -0.12195636332035065, -0.23198196291923523, 0.10912048816680908, -0.10229594260454178, 0.07193301618099213, -0.049468476325273514, -0.04190588742494583, -0.05948297679424286, 0.1818200796842575, -0.0863877534866333, -0.04501494765281677, -0.11758358031511307, 0.03323160111904144, 0.17951449751853943, -0.04813821613788605, 0.05919669568538666, -0.047340065240859985, 0.1745854765176773, -0.00973374955356121, -0.04472276195883751, -0.017611172050237656, -0.08936870098114014, -0.16219156980514526, -0.04232599213719368, 0.13935016095638275, 0.061480868607759476, 0.018609289079904556, 0.009485456161201, 0.045624278485774994, 0.027063492685556412, -0.0896066427230835, 0.026421045884490013, 0.11583738029003143, 0.12864135205745697, 0.02717042714357376, -0.03565632179379463, -0.07850480824708939, -0.1100444495677948, -0.09482334554195404, 0.07679209858179092, 0.1545289307832718, -0.06882855296134949, 0.1601930856704712, 0.12801451981067657, -0.10576099157333374, -0.19426356256008148, -0.058984048664569855, 0.028982635587453842, -0.028169460594654083, 0.11964249610900879, -0.18477903306484222, 0.06821134686470032, 0.05311001092195511, -0.01021083164960146, 0.06116511672735214, -0.24186335504055023, -0.1367848813533783, 0.005769822280853987, 
0.02930350974202156, -0.2313384711742401, -0.17459003627300262, -0.1112493947148323, -0.03523702919483185, -0.15358811616897583, 0.13048505783081055, -0.006169783417135477, 0.023718567565083504, -0.006538316607475281, 0.06296359747648239, 0.05463860556483269, -0.061918955296278, 0.13307346403598785, 0.0024780877865850925, 0.007929263636469841, -0.10826744139194489, -0.02464844472706318, 0.01819840632379055, -0.04992257058620453, 0.09717845171689987, 0.04992103576660156, 0.053510211408138275, -0.0768514946103096, -0.034742698073387146, -0.0562426783144474, 0.04057452827692032, -0.07033675163984299, -0.049686722457408905, -0.06986119598150253, 0.08219946920871735, 0.08870897442102432, -0.01238330453634262, 0.03000251203775406, -0.03572545200586319, 0.04452195763587952, 0.20036086440086365, 0.12268375605344772, 0.04583605006337166, -0.1239890605211258, -0.0229909997433424, -0.013443943113088608, -0.01792658120393753, -0.12664952874183655, 0.036531418561935425, 0.0864316001534462, 0.04964703693985939, 0.07797645032405853, -0.02633102424442768, -0.18781250715255737, -0.0031068124808371067, 0.089633509516716, -0.11446227878332138, -0.2159128338098526, 0.032272640615701675, 0.13739366829395294, -0.1732156127691269, -0.06002457067370415, 0.09957534074783325, 0.015299398452043533, -0.03358704224228859, -0.0017525966977700591, 0.07797419279813766, 0.04317760840058327, 0.08590946346521378, 0.01647503674030304, 0.04734274744987488, -0.06897557526826859, 0.09712720662355423, 0.16152837872505188, -0.11932819336652756, 0.026484975591301918, 0.04740455001592636, -0.05215337499976158, -0.06321101635694504, 0.025975024327635765, -0.019162725657224655, 0.012371990829706192, -0.035473041236400604, 0.023717470467090607, 0.007522540166974068, 0.035419002175331116, 0.143238827586174, 0.007811763323843479, 0.041441239416599274, 0.033553142100572586, -0.004178095143288374, -0.08378381282091141, 0.08847489953041077, 0.023311732336878777, 0.039299193769693375, -0.04321888089179993, 
0.03694293275475502, 0.0181947723031044, -0.005772811826318502, 0.015582025051116943, -0.039223562926054, -0.06343891471624374, -0.006040031090378761, -0.12956668436527252, 0.03780101612210274, -0.07455717772245407, 0.0009910076623782516, -0.010581833310425282, -0.018240658566355705, -0.011787725612521172, 0.005360575392842293, -0.053253982216119766, -0.0626751035451889, -0.042340680956840515, 0.12918807566165924, -0.20808856189250946, -0.001765504595823586, 0.09920531511306763, -0.05949150025844574, 0.07514621317386627, -0.006994163617491722, -0.01450935285538435, 0.016718177124857903, -0.07249321043491364, -0.015043397434055805, -0.019079990684986115, 0.04932459071278572, 0.017273487523198128, -0.15590427815914154, -0.02304546721279621, 0.0030869445763528347, -0.07713286578655243, -0.0017075772630050778, 0.057017434388399124, -0.1502971351146698, 0.014868988655507565, 0.06360294669866562, -0.03755640983581543, -0.046326614916324615, 0.03844499960541725, 0.0484427884221077, 0.004495606757700443, 0.0907687097787857, -0.002983520505949855, 0.042736418545246124, -0.15326687693595886, -0.04947180673480034, -0.00965819414705038, 0.006033116020262241, 0.017464611679315567, 0.023763151839375496, 0.035280972719192505, 0.0028233258053660393, 0.19583635032176971, -0.013525363057851791, 0.08479388058185577, 0.03533654287457466, 0.0008489846950396895, -0.039669983088970184, 0.029103608801960945, 0.020455656573176384, 0.01566360890865326, 0.02380235493183136, 0.02823823317885399, -0.01627964712679386, -0.046147409826517105, -0.05256861075758934, 0.06629858911037445, 0.14994482696056366, 0.163086399435997, -0.04775764048099518, 0.08445307612419128, -0.15835539996623993, -0.041064344346523285, 0.022247759625315666, -0.024841733276844025, 0.03779216110706329, -0.07888220250606537, 0.03957420215010643, 0.06369581073522568, -0.0997866541147232, 0.13998398184776306, -0.06704502552747726, -0.04213111475110054, -0.03152257949113846, -0.13317261636257172, -0.04517219215631485, 
0.01958187110722065, 0.007928154431283474, -0.10236570984125137, 0.10230199247598648, 0.11304018646478653, -0.018163321539759636, -0.007846473716199398, 0.10063043981790543, -0.08210951834917068, -0.055859580636024475, -0.0351511612534523, 0.007731374818831682, 0.020201964303851128, -0.008369104005396366, 0.08176867663860321, 0.014808770269155502, 0.08561515063047409, 0.06748473644256592, 0.09816724061965942, 0.053762003779411316, 0.011587418615818024, -0.04511119797825813, -0.07188115268945694, -0.00432539451867342, -0.004523775074630976, -0.043702591210603714, 0.199998676776886, 0.05411098524928093, 0.019012320786714554, 0.003159809624776244, 0.20604577660560608, 0.009109649807214737, -0.06465084850788116, -0.13875651359558105, 0.09268935024738312, -0.0035273327957838774, 0.017913201823830605, 0.021720845252275467, -0.1387031376361847, 0.026932891458272934, 0.16887494921684265, 0.11211671680212021, 0.031631212681531906, 0.014419778250157833, 0.028898319229483604, 0.029828131198883057, -0.019388988614082336, 0.031947504729032516, 0.04068567603826523, 0.17834235727787018, -0.06094391644001007, 0.04943673312664032, -0.014930575154721737, -0.006569270044565201, -0.023766813799738884, 0.07847157120704651, -0.045897096395492554, 0.021039387211203575, -0.04960435628890991, 0.10863523185253143, -0.043204229325056076, -0.27209335565567017, -0.035368748009204865, -0.11123166233301163, -0.12671251595020294, -0.02200544998049736, 0.02402355521917343, -0.03312721103429794, 0.04036851227283478, 0.04489675164222717, -0.03000112809240818, 0.20681172609329224, 0.014167592860758305, -0.08080270141363144, -0.055847227573394775, 0.06897646188735962, -0.002843192545697093, 0.24970406293869019, -0.009185166098177433, 0.06977005302906036, 0.09591919928789139, -0.016512218862771988, -0.15074385702610016, -0.004973037634044886, 0.10655650496482849, -0.037153176963329315, 0.05303172767162323, 0.1662237048149109, -0.028014924377202988, 0.12416231632232666, 0.048157814890146255, 
-0.03591032698750496, 0.057373616844415665, 0.06485886871814728, 0.05243800953030586, -0.09865666925907135, 0.08431768417358398, -0.09130440652370453, 0.1473904550075531, 0.11585021764039993, -0.03716461360454559, -0.0021343519911170006, -0.05693047121167183, 0.05557391047477722, -0.02235880307853222, 0.11404569447040558, -0.012607320211827755, -0.16882969439029694, 0.031169746071100235, 0.006856621243059635, 0.05427880957722664, -0.22479642927646637, -0.07546660304069519, 0.13427451252937317, -0.03251224011182785, 0.0040977769531309605, 0.08805597573518753, 0.044658541679382324, 0.0011976989917457104, -0.06443159282207489, -0.08543016761541367, -0.006521135102957487, 0.12030867487192154, -0.09809380769729614, -0.04079107567667961 ]
cf8b46f78256400a3ee97dbd821e3591065585a7
# Dataset Card for Evaluation run of dball/zephyr-7b-dpo-qlora <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [dball/zephyr-7b-dpo-qlora](https://huggingface.co/dball/zephyr-7b-dpo-qlora) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_dball__zephyr-7b-dpo-qlora", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T13:56:24.756224](https://huggingface.co/datasets/open-llm-leaderboard/details_dball__zephyr-7b-dpo-qlora/blob/main/results_2024-01-25T13-56-24.756224.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6209398622167857, "acc_stderr": 0.032737832032667495, "acc_norm": 0.6269182715114309, "acc_norm_stderr": 0.03340643373020695, "mc1": 0.28518971848225216, "mc1_stderr": 0.015805827874454892, "mc2": 0.4402682656740883, "mc2_stderr": 0.01458813596551293 }, "harness|arc:challenge|25": { "acc": 0.6006825938566553, "acc_stderr": 0.014312094557946709, "acc_norm": 0.6382252559726962, "acc_norm_stderr": 0.01404195794503808 }, "harness|hellaswag|10": { "acc": 0.6501692889862577, "acc_stderr": 0.004759416464201141, "acc_norm": 0.8492332204740092, "acc_norm_stderr": 0.0035709011883580805 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6370370370370371, "acc_stderr": 0.04153948404742398, "acc_norm": 0.6370370370370371, "acc_norm_stderr": 0.04153948404742398 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6776315789473685, "acc_stderr": 0.03803510248351585, "acc_norm": 0.6776315789473685, "acc_norm_stderr": 0.03803510248351585 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6867924528301886, "acc_stderr": 0.028544793319055326, "acc_norm": 0.6867924528301886, "acc_norm_stderr": 0.028544793319055326 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7152777777777778, "acc_stderr": 0.037738099906869334, "acc_norm": 0.7152777777777778, "acc_norm_stderr": 0.037738099906869334 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6011560693641619, "acc_stderr": 0.0373362665538351, "acc_norm": 0.6011560693641619, "acc_norm_stderr": 0.0373362665538351 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.04897104952726366, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.04897104952726366 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.044084400227680794, "acc_norm": 0.74, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.502127659574468, "acc_stderr": 0.03268572658667492, "acc_norm": 0.502127659574468, "acc_norm_stderr": 0.03268572658667492 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6137931034482759, "acc_stderr": 0.04057324734419035, "acc_norm": 0.6137931034482759, "acc_norm_stderr": 0.04057324734419035 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4074074074074074, "acc_stderr": 0.02530590624159063, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.02530590624159063 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4365079365079365, "acc_stderr": 0.04435932892851466, "acc_norm": 0.4365079365079365, "acc_norm_stderr": 0.04435932892851466 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7322580645161291, "acc_stderr": 0.025189006660212385, "acc_norm": 0.7322580645161291, "acc_norm_stderr": 0.025189006660212385 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 
0.035179450386910616, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.035179450386910616 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.67, "acc_stderr": 0.047258156262526066, "acc_norm": 0.67, "acc_norm_stderr": 0.047258156262526066 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7515151515151515, "acc_stderr": 0.033744026441394036, "acc_norm": 0.7515151515151515, "acc_norm_stderr": 0.033744026441394036 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7525252525252525, "acc_stderr": 0.030746300742124495, "acc_norm": 0.7525252525252525, "acc_norm_stderr": 0.030746300742124495 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8652849740932642, "acc_stderr": 0.024639789097709443, "acc_norm": 0.8652849740932642, "acc_norm_stderr": 0.024639789097709443 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6384615384615384, "acc_stderr": 0.02435958146539699, "acc_norm": 0.6384615384615384, "acc_norm_stderr": 0.02435958146539699 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32592592592592595, "acc_stderr": 0.028578348365473075, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.028578348365473075 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6302521008403361, "acc_stderr": 0.03135709599613591, "acc_norm": 0.6302521008403361, "acc_norm_stderr": 0.03135709599613591 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 0.03861557546255169, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.03861557546255169 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.781651376146789, "acc_stderr": 0.017712600528722717, "acc_norm": 0.781651376146789, "acc_norm_stderr": 0.017712600528722717 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5092592592592593, "acc_stderr": 0.034093869469927006, "acc_norm": 0.5092592592592593, "acc_norm_stderr": 0.034093869469927006 }, 
"harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7647058823529411, "acc_stderr": 0.02977177522814563, "acc_norm": 0.7647058823529411, "acc_norm_stderr": 0.02977177522814563 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7468354430379747, "acc_stderr": 0.028304657943035303, "acc_norm": 0.7468354430379747, "acc_norm_stderr": 0.028304657943035303 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6591928251121076, "acc_stderr": 0.031811497470553604, "acc_norm": 0.6591928251121076, "acc_norm_stderr": 0.031811497470553604 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7480916030534351, "acc_stderr": 0.03807387116306085, "acc_norm": 0.7480916030534351, "acc_norm_stderr": 0.03807387116306085 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7272727272727273, "acc_stderr": 0.04065578140908705, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04065578140908705 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7407407407407407, "acc_stderr": 0.04236511258094634, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.04236511258094634 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7730061349693251, "acc_stderr": 0.032910995786157686, "acc_norm": 0.7730061349693251, "acc_norm_stderr": 0.032910995786157686 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010212, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010212 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.040580420156460344, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8547008547008547, "acc_stderr": 0.02308663508684141, "acc_norm": 0.8547008547008547, "acc_norm_stderr": 0.02308663508684141 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, 
"harness|hendrycksTest-miscellaneous|5": { "acc": 0.7994891443167306, "acc_stderr": 0.014317653708594202, "acc_norm": 0.7994891443167306, "acc_norm_stderr": 0.014317653708594202 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7138728323699421, "acc_stderr": 0.02433214677913413, "acc_norm": 0.7138728323699421, "acc_norm_stderr": 0.02433214677913413 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4134078212290503, "acc_stderr": 0.01646981492840617, "acc_norm": 0.4134078212290503, "acc_norm_stderr": 0.01646981492840617 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7091503267973857, "acc_stderr": 0.02600480036395213, "acc_norm": 0.7091503267973857, "acc_norm_stderr": 0.02600480036395213 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6655948553054662, "acc_stderr": 0.026795422327893934, "acc_norm": 0.6655948553054662, "acc_norm_stderr": 0.026795422327893934 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7006172839506173, "acc_stderr": 0.025483115601195448, "acc_norm": 0.7006172839506173, "acc_norm_stderr": 0.025483115601195448 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.46099290780141844, "acc_stderr": 0.029736592526424438, "acc_norm": 0.46099290780141844, "acc_norm_stderr": 0.029736592526424438 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.45045632333767927, "acc_stderr": 0.012707390438502346, "acc_norm": 0.45045632333767927, "acc_norm_stderr": 0.012707390438502346 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.625, "acc_stderr": 0.029408372932278746, "acc_norm": 0.625, "acc_norm_stderr": 0.029408372932278746 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6421568627450981, "acc_stderr": 0.019393058402355435, "acc_norm": 0.6421568627450981, "acc_norm_stderr": 0.019393058402355435 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7, "acc_stderr": 0.04389311454644287, "acc_norm": 0.7, "acc_norm_stderr": 0.04389311454644287 }, 
"harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8159203980099502, "acc_stderr": 0.027403859410786848, "acc_norm": 0.8159203980099502, "acc_norm_stderr": 0.027403859410786848 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.88, "acc_stderr": 0.03265986323710906, "acc_norm": 0.88, "acc_norm_stderr": 0.03265986323710906 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.038695433234721015, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.038695433234721015 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8128654970760234, "acc_stderr": 0.029913127232368043, "acc_norm": 0.8128654970760234, "acc_norm_stderr": 0.029913127232368043 }, "harness|truthfulqa:mc|0": { "mc1": 0.28518971848225216, "mc1_stderr": 0.015805827874454892, "mc2": 0.4402682656740883, "mc2_stderr": 0.01458813596551293 }, "harness|winogrande|5": { "acc": 0.7861089187056038, "acc_stderr": 0.01152446695409025 }, "harness|gsm8k|5": { "acc": 0.33965125094768767, "acc_stderr": 0.013045045067665257 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_dball__zephyr-7b-dpo-qlora
[ "region:us" ]
2024-01-25T13:58:42+00:00
{"pretty_name": "Evaluation run of dball/zephyr-7b-dpo-qlora", "dataset_summary": "Dataset automatically created during the evaluation run of model [dball/zephyr-7b-dpo-qlora](https://huggingface.co/dball/zephyr-7b-dpo-qlora) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_dball__zephyr-7b-dpo-qlora\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T13:56:24.756224](https://huggingface.co/datasets/open-llm-leaderboard/details_dball__zephyr-7b-dpo-qlora/blob/main/results_2024-01-25T13-56-24.756224.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6209398622167857,\n \"acc_stderr\": 0.032737832032667495,\n \"acc_norm\": 0.6269182715114309,\n \"acc_norm_stderr\": 0.03340643373020695,\n \"mc1\": 0.28518971848225216,\n \"mc1_stderr\": 0.015805827874454892,\n \"mc2\": 0.4402682656740883,\n \"mc2_stderr\": 0.01458813596551293\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6006825938566553,\n \"acc_stderr\": 0.014312094557946709,\n \"acc_norm\": 0.6382252559726962,\n \"acc_norm_stderr\": 0.01404195794503808\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6501692889862577,\n \"acc_stderr\": 0.004759416464201141,\n \"acc_norm\": 0.8492332204740092,\n \"acc_norm_stderr\": 0.0035709011883580805\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6370370370370371,\n \"acc_stderr\": 0.04153948404742398,\n \"acc_norm\": 0.6370370370370371,\n \"acc_norm_stderr\": 0.04153948404742398\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6776315789473685,\n \"acc_stderr\": 0.03803510248351585,\n \"acc_norm\": 0.6776315789473685,\n \"acc_norm_stderr\": 0.03803510248351585\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6867924528301886,\n \"acc_stderr\": 0.028544793319055326,\n \"acc_norm\": 0.6867924528301886,\n \"acc_norm_stderr\": 0.028544793319055326\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7152777777777778,\n \"acc_stderr\": 0.037738099906869334,\n \"acc_norm\": 0.7152777777777778,\n \"acc_norm_stderr\": 0.037738099906869334\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n 
\"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6011560693641619,\n \"acc_stderr\": 0.0373362665538351,\n \"acc_norm\": 0.6011560693641619,\n \"acc_norm_stderr\": 0.0373362665538351\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.04897104952726366,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.04897104952726366\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.044084400227680794\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.502127659574468,\n \"acc_stderr\": 0.03268572658667492,\n \"acc_norm\": 0.502127659574468,\n \"acc_norm_stderr\": 0.03268572658667492\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6137931034482759,\n \"acc_stderr\": 0.04057324734419035,\n \"acc_norm\": 0.6137931034482759,\n \"acc_norm_stderr\": 0.04057324734419035\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4074074074074074,\n \"acc_stderr\": 0.02530590624159063,\n \"acc_norm\": 0.4074074074074074,\n \"acc_norm_stderr\": 0.02530590624159063\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4365079365079365,\n \"acc_stderr\": 0.04435932892851466,\n \"acc_norm\": 0.4365079365079365,\n \"acc_norm_stderr\": 
0.04435932892851466\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7322580645161291,\n \"acc_stderr\": 0.025189006660212385,\n \"acc_norm\": 0.7322580645161291,\n \"acc_norm_stderr\": 0.025189006660212385\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.035179450386910616,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.035179450386910616\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.047258156262526066,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.047258156262526066\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7515151515151515,\n \"acc_stderr\": 0.033744026441394036,\n \"acc_norm\": 0.7515151515151515,\n \"acc_norm_stderr\": 0.033744026441394036\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7525252525252525,\n \"acc_stderr\": 0.030746300742124495,\n \"acc_norm\": 0.7525252525252525,\n \"acc_norm_stderr\": 0.030746300742124495\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8652849740932642,\n \"acc_stderr\": 0.024639789097709443,\n \"acc_norm\": 0.8652849740932642,\n \"acc_norm_stderr\": 0.024639789097709443\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6384615384615384,\n \"acc_stderr\": 0.02435958146539699,\n \"acc_norm\": 0.6384615384615384,\n \"acc_norm_stderr\": 0.02435958146539699\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32592592592592595,\n \"acc_stderr\": 0.028578348365473075,\n \"acc_norm\": 0.32592592592592595,\n \"acc_norm_stderr\": 0.028578348365473075\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6302521008403361,\n 
\"acc_stderr\": 0.03135709599613591,\n \"acc_norm\": 0.6302521008403361,\n \"acc_norm_stderr\": 0.03135709599613591\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.03861557546255169,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.03861557546255169\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.781651376146789,\n \"acc_stderr\": 0.017712600528722717,\n \"acc_norm\": 0.781651376146789,\n \"acc_norm_stderr\": 0.017712600528722717\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5092592592592593,\n \"acc_stderr\": 0.034093869469927006,\n \"acc_norm\": 0.5092592592592593,\n \"acc_norm_stderr\": 0.034093869469927006\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7647058823529411,\n \"acc_stderr\": 0.02977177522814563,\n \"acc_norm\": 0.7647058823529411,\n \"acc_norm_stderr\": 0.02977177522814563\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7468354430379747,\n \"acc_stderr\": 0.028304657943035303,\n \"acc_norm\": 0.7468354430379747,\n \"acc_norm_stderr\": 0.028304657943035303\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6591928251121076,\n \"acc_stderr\": 0.031811497470553604,\n \"acc_norm\": 0.6591928251121076,\n \"acc_norm_stderr\": 0.031811497470553604\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7480916030534351,\n \"acc_stderr\": 0.03807387116306085,\n \"acc_norm\": 0.7480916030534351,\n \"acc_norm_stderr\": 0.03807387116306085\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.04065578140908705,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.04065578140908705\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.04236511258094634,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.04236511258094634\n },\n 
\"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.032910995786157686,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.032910995786157686\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04697113923010212,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04697113923010212\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.02308663508684141,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.02308663508684141\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7994891443167306,\n \"acc_stderr\": 0.014317653708594202,\n \"acc_norm\": 0.7994891443167306,\n \"acc_norm_stderr\": 0.014317653708594202\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7138728323699421,\n \"acc_stderr\": 0.02433214677913413,\n \"acc_norm\": 0.7138728323699421,\n \"acc_norm_stderr\": 0.02433214677913413\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4134078212290503,\n \"acc_stderr\": 0.01646981492840617,\n \"acc_norm\": 0.4134078212290503,\n \"acc_norm_stderr\": 0.01646981492840617\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7091503267973857,\n \"acc_stderr\": 0.02600480036395213,\n \"acc_norm\": 0.7091503267973857,\n \"acc_norm_stderr\": 0.02600480036395213\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6655948553054662,\n \"acc_stderr\": 0.026795422327893934,\n \"acc_norm\": 0.6655948553054662,\n \"acc_norm_stderr\": 0.026795422327893934\n },\n 
\"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7006172839506173,\n \"acc_stderr\": 0.025483115601195448,\n \"acc_norm\": 0.7006172839506173,\n \"acc_norm_stderr\": 0.025483115601195448\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.46099290780141844,\n \"acc_stderr\": 0.029736592526424438,\n \"acc_norm\": 0.46099290780141844,\n \"acc_norm_stderr\": 0.029736592526424438\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.45045632333767927,\n \"acc_stderr\": 0.012707390438502346,\n \"acc_norm\": 0.45045632333767927,\n \"acc_norm_stderr\": 0.012707390438502346\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.625,\n \"acc_stderr\": 0.029408372932278746,\n \"acc_norm\": 0.625,\n \"acc_norm_stderr\": 0.029408372932278746\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6421568627450981,\n \"acc_stderr\": 0.019393058402355435,\n \"acc_norm\": 0.6421568627450981,\n \"acc_norm_stderr\": 0.019393058402355435\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.04389311454644287,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.04389311454644287\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8159203980099502,\n \"acc_stderr\": 0.027403859410786848,\n \"acc_norm\": 0.8159203980099502,\n \"acc_norm_stderr\": 0.027403859410786848\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.88,\n \"acc_stderr\": 0.03265986323710906,\n \"acc_norm\": 0.88,\n \"acc_norm_stderr\": 0.03265986323710906\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.038695433234721015,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.038695433234721015\n },\n 
\"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8128654970760234,\n \"acc_stderr\": 0.029913127232368043,\n \"acc_norm\": 0.8128654970760234,\n \"acc_norm_stderr\": 0.029913127232368043\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.28518971848225216,\n \"mc1_stderr\": 0.015805827874454892,\n \"mc2\": 0.4402682656740883,\n \"mc2_stderr\": 0.01458813596551293\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7861089187056038,\n \"acc_stderr\": 0.01152446695409025\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.33965125094768767,\n \"acc_stderr\": 0.013045045067665257\n }\n}\n```", "repo_url": "https://huggingface.co/dball/zephyr-7b-dpo-qlora", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|arc:challenge|25_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|gsm8k|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hellaswag|10_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T13-56-24.756224.parquet", 
"**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T13-56-24.756224.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T13-56-24.756224.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T13-56-24.756224.parquet", 
"**/details_harness|hendrycksTest-college_physics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T13-56-24.756224.parquet", 
"**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T13-56-24.756224.parquet", 
"**/details_harness|hendrycksTest-sociology|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T13-56-24.756224.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T13-56-24.756224.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": 
["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T13-56-24.756224.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T13-56-24.756224.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["**/details_harness|winogrande|5_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T13-56-24.756224.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T13_56_24.756224", "path": ["results_2024-01-25T13-56-24.756224.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T13-56-24.756224.parquet"]}]}]}
2024-01-25T13:59:08+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of dball/zephyr-7b-dpo-qlora Dataset automatically created during the evaluation run of model dball/zephyr-7b-dpo-qlora on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T13:56:24.756224(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of dball/zephyr-7b-dpo-qlora\n\n\n\nDataset automatically created during the evaluation run of model dball/zephyr-7b-dpo-qlora on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T13:56:24.756224(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of dball/zephyr-7b-dpo-qlora\n\n\n\nDataset automatically created during the evaluation run of model dball/zephyr-7b-dpo-qlora on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T13:56:24.756224(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 187, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of dball/zephyr-7b-dpo-qlora\n\n\n\nDataset automatically created during the evaluation run of model dball/zephyr-7b-dpo-qlora on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T13:56:24.756224(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.05716709792613983, 0.2163788378238678, -0.0058339908719062805, 0.03938574343919754, 0.09141074120998383, -0.014068014919757843, 0.03204193711280823, 0.09829306602478027, 0.01163622085005045, 0.17752599716186523, -0.021341092884540558, 0.11291512101888657, 0.0714859738945961, 0.12437248229980469, 0.024024425074458122, -0.1381535828113556, 0.019358353689312935, -0.08534902334213257, 0.06650665402412415, 0.0836254209280014, 0.0671859160065651, -0.08316951990127563, 0.06274344027042389, -0.027459297329187393, 0.02976825274527073, -0.01859699934720993, -0.08573994040489197, -0.0294991172850132, 0.09509195387363434, 0.11333011090755463, 0.04161951690912247, -0.015471506863832474, 0.02048208937048912, -0.2722088694572449, 0.015908747911453247, 0.09494456648826599, -0.0017493786290287971, 0.028477951884269714, 0.13566413521766663, -0.05395316332578659, 0.08778741210699081, -0.03157775476574898, 0.0781724750995636, 0.05566490814089775, -0.10569261014461517, -0.14692595601081848, -0.1411873698234558, -0.012277173809707165, 0.07020959258079529, 0.05074497312307358, -0.025952547788619995, 0.13691985607147217, -0.06699968129396439, 0.04469849914312363, 0.12201247364282608, -0.10019998252391815, -0.01599196158349514, 0.0655137300491333, 0.012238642200827599, 0.06766197830438614, -0.08719839155673981, -0.029677720740437508, 0.028720824047923088, 0.05310211703181267, -0.0002501746639609337, 0.01325178425759077, 0.0015513254329562187, 0.01296136062592268, -0.1366642713546753, -0.12662601470947266, 0.14459942281246185, 0.005963922943919897, -0.044392138719558716, -0.183782160282135, -0.010813353583216667, 0.012853089720010757, 0.006179693154990673, -0.023138493299484253, 0.0025428212247788906, -0.02506139501929283, 0.10010014474391937, -0.004896393045783043, -0.10201884806156158, -0.03014538437128067, -0.016810793429613113, 0.08566119521856308, 0.016101114451885223, -0.015847347676753998, -0.0007478026673197746, 0.12019366770982742, 0.012676188722252846, -0.0754263699054718, 
-0.08171358704566956, -0.061940260231494904, -0.13125094771385193, -0.04817281663417816, 0.0015747096622362733, -0.08305758982896805, 0.044473469257354736, 0.22753068804740906, -0.011354144662618637, 0.030805686488747597, -0.09427303075790405, 0.0016470456030219793, 0.11685726046562195, 0.06461125612258911, -0.07514190673828125, -0.049647994339466095, -0.0327146016061306, 0.031959183514118195, 0.028337471187114716, -0.015574895776808262, 0.0061032408848404884, 0.052691712975502014, 0.035697538405656815, 0.11503448337316513, 0.11813098937273026, 0.04435675963759422, -0.06780125200748444, -0.029411960393190384, 0.23542043566703796, -0.1428648978471756, -0.025965794920921326, 0.02589314430952072, -0.054105158895254135, -0.1242356225848198, 0.06022333726286888, -0.005349406972527504, -0.04598378762602806, 0.1414136290550232, -0.04487472027540207, -0.07866111397743225, -0.07421154528856277, -0.059336159378290176, 0.055354028940200806, 0.03129906952381134, -0.03713274747133255, -0.07715437561273575, -0.08278444409370422, -0.08711984008550644, 0.024904251098632812, -0.05585017800331116, -0.032775819301605225, 0.008490427397191525, -0.005127689801156521, -0.014949755743145943, -0.005683875642716885, 0.10774560272693634, -0.05085551738739014, 0.045546822249889374, -0.0007834540447220206, 0.012962241657078266, 0.0831926167011261, 0.048156727105379105, -0.11743535101413727, 0.07082410156726837, -0.15136492252349854, 0.09860855340957642, -0.11643210053443909, -0.016146143898367882, -0.1235729306936264, -0.012838071212172508, -0.017433565109968185, 0.03387182205915451, -0.02829936519265175, 0.09393368661403656, -0.2251303493976593, 0.008707639761269093, 0.13872620463371277, -0.11235621571540833, -0.08159530162811279, 0.07691490650177002, -0.049462925642728806, 0.04997748136520386, 0.03729996085166931, 0.10886161029338837, 0.11977051198482513, -0.07574451714754105, -0.08612778782844543, -0.059988461434841156, -0.03162801265716553, 0.16288280487060547, 0.06362053751945496, 
-0.08466701209545135, 0.11757215857505798, 0.054614052176475525, -0.022849708795547485, -0.07326550781726837, 0.008362101390957832, -0.0663912296295166, -0.0034655528143048286, -0.07421945035457611, -0.037023670971393585, -0.00501626543700695, -0.0740203857421875, -0.00542157469317317, -0.07956969738006592, -0.015500374138355255, 0.09609818458557129, -0.013727046549320221, 0.0028068656101822853, -0.06768292188644409, 0.02706916630268097, 0.017244577407836914, 0.015562751330435276, -0.22195062041282654, -0.12293002009391785, 0.02864593267440796, -0.19258058071136475, 0.04816049337387085, 0.040253154933452606, 0.012587279081344604, 0.04413123428821564, -0.002723613753914833, 0.03192057088017464, 0.01323808915913105, -0.01046473253518343, -0.006000657565891743, -0.12814149260520935, -0.053849928081035614, -0.09207859635353088, 0.08723780512809753, -0.14705072343349457, -0.015015807002782822, 0.07405583560466766, 0.15089698135852814, 0.023747995495796204, -0.08628851175308228, 0.06049890071153641, 0.010898995213210583, -0.03270064294338226, -0.053977273404598236, -0.0013481327332556248, -0.02574053406715393, 0.027888894081115723, 0.014460841193795204, -0.19999800622463226, -0.09446381777524948, 0.06773847341537476, 0.13621410727500916, -0.07721802592277527, -0.1110323965549469, -0.058455124497413635, -0.06572912633419037, -0.0798453763127327, -0.06409471482038498, 0.09193232655525208, 0.09547656774520874, 0.043776191771030426, -0.06253255903720856, -0.051711034029722214, 0.01074620895087719, 0.06823360919952393, -0.06134448200464249, 0.10159434378147125, 0.08009448647499084, -0.07655347138643265, 0.10407353192567825, -0.04871553182601929, 0.1089339330792427, 0.0723833292722702, 0.021239744499325752, -0.09409939497709274, 0.009598920121788979, 0.0682210922241211, 0.03473556786775589, 0.07556905597448349, -0.0441848449409008, 0.04010611027479172, 0.08104071021080017, -0.01625571772456169, 0.03217817842960358, -0.06038956344127655, 0.0257892869412899, 0.0417051836848259, 
0.004761919844895601, 0.009691163897514343, 0.018911510705947876, 0.019107971340417862, 0.07739508152008057, 0.02693309634923935, 0.08384969830513, -0.009649493731558323, -0.05160902440547943, -0.09806270897388458, 0.14715418219566345, -0.0868164449930191, -0.283072829246521, -0.1549321413040161, -0.027066707611083984, -0.045122504234313965, -0.013776525855064392, 0.06177183985710144, -0.006165695376694202, -0.09569959342479706, -0.09476499259471893, 0.047722022980451584, 0.022841863334178925, -0.13754259049892426, -0.06584008038043976, 0.05749962478876114, -0.0032788277603685856, -0.16939659416675568, 0.048549048602581024, 0.044957201927900314, -0.04568352550268173, -0.004798692185431719, 0.08739539980888367, 0.11508854478597641, 0.08737029135227203, 0.07264018058776855, -0.026200387626886368, -0.013007469475269318, 0.1779904067516327, -0.1181919202208519, 0.029120326042175293, 0.09424032270908356, -0.04865235090255737, 0.07069872319698334, 0.18242700397968292, 0.01485803909599781, -0.08653263747692108, 0.05895461142063141, 0.09425535798072815, -0.06221165880560875, -0.24215620756149292, -0.12123355269432068, -0.02741624042391777, 0.0013303980231285095, 0.10156522691249847, 0.06516417860984802, 0.01576979272067547, 0.009640151634812355, -0.11209206283092499, -0.028293702751398087, -0.050411731004714966, 0.06470786035060883, 0.08549009263515472, 0.00502926018089056, 0.04013839364051819, -0.03471998870372772, 0.014784268103539944, 0.1051347479224205, 0.044845178723335266, 0.15840432047843933, -0.05015791207551956, 0.16284754872322083, 0.07809610664844513, 0.06911337375640869, -0.03880276903510094, 0.03874093294143677, 0.005608591251075268, 0.0636846274137497, -0.017544971778988838, -0.10280802845954895, -0.060789693146944046, 0.10650480538606644, 0.039591871201992035, -0.08162841200828552, 0.025187626481056213, -0.06760253012180328, 0.02830091118812561, 0.18546459078788757, -0.03926709666848183, -0.12633638083934784, -0.053628552705049515, 0.04301522672176361, 
-0.031046926975250244, -0.09020650386810303, -0.0025828820653259754, 0.06956935673952103, -0.1466115415096283, 0.020541362464427948, -0.0417068749666214, 0.07192431390285492, -0.1376245617866516, -0.01703101024031639, -0.010300688445568085, 0.035260818898677826, 0.0005225350614637136, 0.12113486230373383, -0.1547478288412094, 0.08827447891235352, -0.004473202861845493, 0.016743138432502747, -0.1083260253071785, 0.05298341438174248, -0.03410874307155609, -0.05437879264354706, 0.13184165954589844, -0.01022634282708168, -0.08352742344141006, -0.05241456255316734, -0.09433729946613312, -0.005166035145521164, 0.06186932325363159, -0.1006465032696724, 0.10217072814702988, 0.03648131340742111, -0.018026892095804214, -0.03415566310286522, -0.009240712970495224, -0.10577672719955444, -0.2407885491847992, 0.1153329387307167, -0.12748754024505615, 0.06119763106107712, -0.06470493972301483, -0.044767335057258606, -0.04372387379407883, 0.1491885781288147, -0.07944828271865845, -0.06765058636665344, -0.10299541056156158, -0.013645712286233902, 0.1769144982099533, -0.05225014314055443, 0.06720899790525436, -0.03555324673652649, 0.17161425948143005, -0.018330464139580727, -0.041093260049819946, -0.001078035682439804, -0.08102456480264664, -0.1760134994983673, -0.04868512600660324, 0.10581507533788681, 0.0808379128575325, 0.009229319170117378, -0.002560832304880023, 0.00972434226423502, 0.025498969480395317, -0.09923911094665527, 0.02617095597088337, 0.11970791965723038, 0.1130785197019577, 0.03455521911382675, -0.0062526739202439785, -0.11905365437269211, -0.10782508552074432, -0.09473872184753418, 0.048716314136981964, 0.16833454370498657, -0.0666956752538681, 0.1671614944934845, 0.13915985822677612, -0.08152014017105103, -0.1784971058368683, -0.0712238997220993, 0.03356102854013443, -0.030159175395965576, 0.1250128149986267, -0.21334540843963623, 0.06359606236219406, 0.06966346502304077, -0.029407253488898277, 0.13009002804756165, -0.2560175657272339, -0.13330993056297302, 
0.0462813638150692, 0.03662648797035217, -0.23488324880599976, -0.17106401920318604, -0.11035273224115372, -0.018342159688472748, -0.16852067410945892, 0.12683773040771484, 0.04196900501847267, 0.0276383925229311, -0.02427911013364792, 0.0993669182062149, 0.06472983956336975, -0.06915492564439774, 0.1311253011226654, -0.0020538652315735817, 0.019940810278058052, -0.10762490332126617, -0.05049353092908859, -0.013744309544563293, -0.041516926139593124, 0.07519003003835678, 0.03050718829035759, 0.059215184301137924, -0.08994480222463608, -0.03663191199302673, -0.06791061162948608, 0.04592283070087433, -0.073023721575737, -0.062480464577674866, -0.06960378587245941, 0.08427261561155319, 0.08151011168956757, -0.009305267594754696, 0.012206187471747398, -0.046587832272052765, 0.03892311453819275, 0.21492427587509155, 0.11069287359714508, 0.05596407502889633, -0.12131137400865555, -0.034080199897289276, -0.009951475076377392, 0.0030384601559489965, -0.09943117201328278, 0.04030860215425491, 0.09740191698074341, 0.051196470856666565, 0.07118469476699829, -0.021610146388411522, -0.18126815557479858, 0.003795425873249769, 0.07312285900115967, -0.09242665767669678, -0.20150884985923767, 0.02671329490840435, 0.14585483074188232, -0.1568409949541092, -0.061088621616363525, 0.07457971572875977, 0.022117605432868004, -0.036287903785705566, -0.00031205639243125916, 0.0721147358417511, 0.06490457057952881, 0.10820632427930832, 0.015288970433175564, 0.05551295727491379, -0.06776493787765503, 0.08631055802106857, 0.13615068793296814, -0.11995784938335419, 0.0049631958827376366, 0.031024403870105743, -0.06421460211277008, -0.06895066797733307, -0.019232692196965218, -0.01515372283756733, 0.018125995993614197, -0.03350654989480972, 0.03075772151350975, -0.022110478952527046, 0.05569995194673538, 0.11853697150945663, 0.00817934237420559, 0.04919161647558212, 0.015645943582057953, -0.005376182496547699, -0.05681289732456207, 0.10328585654497147, 0.027291858568787575, 0.04920215904712677, 
-0.04704459384083748, 0.022957947105169296, 0.004589826799929142, -0.023085372522473335, 0.018837429583072662, -0.046541765332221985, -0.06457071751356125, -0.0016642720438539982, -0.16437259316444397, 0.052047014236450195, -0.0791296660900116, 0.012291472405195236, 0.000129757565446198, -0.01412486843764782, 0.010470983572304249, 0.008280294016003609, -0.0759340226650238, -0.04858759045600891, -0.04433083534240723, 0.12736667692661285, -0.1904664933681488, -0.009883031249046326, 0.07990451157093048, -0.07046935707330704, 0.07439205050468445, -0.007501070387661457, -0.01997232437133789, 0.014515918679535389, -0.08789967000484467, -0.0026718340814113617, -0.02780453860759735, 0.06594671308994293, 0.008298463188111782, -0.13860902190208435, -0.017955411225557327, -0.003307418432086706, -0.08427506685256958, -0.012351318262517452, 0.029544364660978317, -0.1685655117034912, 0.04214680194854736, 0.08712649345397949, -0.040667127817869186, -0.05009480565786362, 0.0340203158557415, 0.05775928497314453, -0.002969285473227501, 0.10452671349048615, 0.0007040053606033325, 0.03951648250222206, -0.141166090965271, -0.04888366162776947, 0.0010055144084617496, 0.016855843365192413, 0.051690056920051575, 0.028148509562015533, 0.026930779218673706, 0.0025561898946762085, 0.21302659809589386, -0.017323574051260948, 0.04146549478173256, 0.020200300961732864, -0.008037665858864784, -0.017993323504924774, 0.03140395134687424, 0.02108035981655121, 0.0041209799237549305, 0.020663481205701828, 0.029374251142144203, -0.02613252028822899, -0.06500086933374405, -0.017339708283543587, 0.06737155467271805, 0.13223156332969666, 0.14264927804470062, -0.04186679422855377, 0.06115464121103287, -0.16165485978126526, -0.05871567875146866, 0.026752974838018417, -0.032990992069244385, 0.04203033447265625, -0.0796922892332077, 0.05725160986185074, 0.07800133526325226, -0.09226609766483307, 0.1470184624195099, -0.06294344365596771, -0.02818603813648224, -0.02543589472770691, -0.1605435460805893, 
-0.0348505973815918, 0.023481059819459915, 0.007568048313260078, -0.08420498669147491, 0.11084409058094025, 0.13181371986865997, -0.016167808324098587, -0.01536257378757, 0.09258249402046204, -0.04815857857465744, -0.06345654278993607, -0.021543966606259346, 0.002509552985429764, 0.00988821778446436, 0.013767041265964508, 0.0768086165189743, 0.019044114276766777, 0.05620560795068741, 0.0671379417181015, 0.09005683660507202, 0.02617846429347992, 0.008184421807527542, -0.03810052201151848, -0.04507935792207718, 0.000661155441775918, -0.012548089027404785, -0.05801084637641907, 0.2188589870929718, 0.05060335248708725, 0.01878194697201252, 0.008843573741614819, 0.22135230898857117, -0.004505515098571777, -0.07191058993339539, -0.12312681972980499, 0.137863427400589, 0.012838033959269524, 0.02986520528793335, 0.029104825109243393, -0.12670589983463287, 0.033004745841026306, 0.1448487937450409, 0.1023659035563469, 0.04511405527591705, 0.012004719115793705, 0.03930869325995445, 0.024699576199054718, -0.02807668037712574, 0.05528731644153595, 0.030972663313150406, 0.23438474535942078, -0.05548451095819473, 0.07332531362771988, -0.010966204106807709, 0.0020907577127218246, -0.015669453889131546, 0.10453112423419952, -0.028970563784241676, 0.01668311282992363, -0.06960156559944153, 0.09700034558773041, -0.07041490823030472, -0.2584557831287384, -0.02248801663517952, -0.06095336377620697, -0.1362956166267395, -0.014694505371153355, 0.023487459868192673, -0.019779331982135773, 0.044899698346853256, 0.031260792165994644, -0.025133417919278145, 0.19367919862270355, 0.0021041606087237597, -0.0827910304069519, -0.0593370646238327, 0.07000696659088135, -0.03384482488036156, 0.2717955708503723, -0.0007791111711412668, 0.07988475263118744, 0.090044304728508, -0.007703817915171385, -0.13341008126735687, 0.02780061960220337, 0.09659456461668015, -0.05714523792266846, 0.06854778528213501, 0.1635078489780426, -0.015998445451259613, 0.14783111214637756, 0.033208590000867844, 
-0.02753087878227234, 0.07520614564418793, 0.08650004863739014, 0.040661025792360306, -0.10345422476530075, 0.08668182790279388, -0.08681251108646393, 0.12925386428833008, 0.10611255466938019, -0.013074515387415886, 0.0013160421513020992, -0.052608318626880646, 0.05592651292681694, -0.031420059502124786, 0.13838794827461243, -0.023391541093587875, -0.14689359068870544, 0.041431546211242676, 0.007326250895857811, 0.05243058502674103, -0.22286398708820343, -0.05220635235309601, 0.11030136048793793, -0.049257200211286545, 0.029971066862344742, 0.08052684366703033, 0.03039449267089367, 0.012785527855157852, -0.06490403413772583, -0.08787088096141815, -0.0033102065790444613, 0.11882514506578445, -0.1003623753786087, -0.043088965117931366 ]
6a96bb1a6c6cc4047d43516977fe122a9631f784
--- Description: - This dataset comprises images of bone cancer annotated with bounding boxes for object detection tasks. It is a combination of two distinct datasets: one sourced from Roboflow, featuring images of tumor-affected bones, and another obtained from the FracAtlas dataset, containing images of healthy bones. --- Task: - Object Detection - Classification --- Annotations: - Bounding Boxes --- Data Source: - Roboflow Dataset: Contains images of bones affected by tumors, sourced from Roboflow. - FracAtlas Dataset: Comprises images of healthy bones, extracted from the FracAtlas dataset. ---
VibhuRaj01/Bone_Tumor
[ "task_categories:object-detection", "size_categories:1K<n<10K", "biology", "medical", "region:us" ]
2024-01-25T14:05:00+00:00
{"size_categories": ["1K<n<10K"], "task_categories": ["object-detection"], "tags": ["biology", "medical"]}
2024-02-03T07:00:12+00:00
[]
[]
TAGS #task_categories-object-detection #size_categories-1K<n<10K #biology #medical #region-us
--- Description: - This dataset comprises images of bone cancer annotated with bounding boxes for object detection tasks. It is a combination of two distinct datasets: one sourced from Roboflow, featuring images of tumor-affected bones, and another obtained from the FracAtlas dataset, containing images of healthy bones. --- Task: - Object Detection - Classification --- Annotations: - Bounding Boxes --- Data Source: - Roboflow Dataset: Contains images of bones affected by tumors, sourced from Roboflow. - FracAtlas Dataset: Comprises images of healthy bones, extracted from the FracAtlas dataset. ---
[]
[ "TAGS\n#task_categories-object-detection #size_categories-1K<n<10K #biology #medical #region-us \n" ]
[ 35 ]
[ "passage: TAGS\n#task_categories-object-detection #size_categories-1K<n<10K #biology #medical #region-us \n" ]
[ 0.028025580570101738, -0.036591414362192154, -0.005411012098193169, -0.10623408854007721, 0.07606931030750275, 0.010923225432634354, 0.0518246665596962, 0.20473003387451172, 0.30296123027801514, 0.13258697092533112, 0.12482518702745438, 0.06590698659420013, -0.02891077660024166, 0.1360466182231903, -0.03970194607973099, -0.21342052519321442, 0.07356423884630203, 0.05504605546593666, -0.02206500433385372, 0.042397599667310715, -0.029948078095912933, -0.09149082005023956, 0.030486654490232468, -0.08858097344636917, 0.032982535660266876, -0.00014463983825407922, 0.060738906264305115, -0.05795693397521973, 0.08446936309337616, -0.16166448593139648, 0.08321162313222885, -0.02204195037484169, -0.023135896772146225, -0.21765254437923431, -0.014456651173532009, -0.045845188200473785, 0.02499815635383129, 0.05465078353881836, 0.06410825252532959, -0.03684530034661293, 0.005961006972938776, -0.27205556631088257, 0.05468009039759636, 0.014517875388264656, -0.06984679400920868, -0.1586979180574417, 0.021583525463938713, 0.05685881897807121, -0.08119143545627594, 0.03946787863969803, 0.0069769141264259815, 0.23055905103683472, -0.13009150326251984, 0.045537155121564865, 0.22905495762825012, -0.14123837649822235, 0.016474617645144463, 0.20946921408176422, 0.19141227006912231, 0.1469443291425705, -0.02943967469036579, 0.06652912497520447, -0.022300871089100838, -0.010278663598001003, 0.020025739446282387, -0.05313854292035103, -0.03209281712770462, 0.01014180202037096, -0.061350882053375244, -0.02809927426278591, 0.21548545360565186, 0.0017638084245845675, 0.09402227401733398, -0.09078975766897202, -0.08539220690727234, -0.12990222871303558, -0.07774782180786133, -0.0068077994510531425, 0.02587716467678547, -0.05039399489760399, 0.07845178991556168, 0.03393171727657318, 0.011762745678424835, -0.05537101626396179, -0.13777050375938416, 0.12018055468797684, -0.06022227555513382, 0.057032182812690735, -0.05639269948005676, -0.025776252150535583, -0.19941239058971405, 
-0.017194662243127823, 0.04296868294477463, 0.010914470069110394, -0.19333061575889587, -0.11076968908309937, 0.05018044263124466, 0.007040409836918116, 0.2612156271934509, 0.0052050016820430756, 0.00416147243231535, 0.015603368170559406, -0.07935498654842377, 0.0918305367231369, 0.10110842436552048, -0.03022848255932331, -0.18616530299186707, -0.02391262911260128, -0.03235475718975067, 0.039295487105846405, -0.047796957194805145, 0.030868200585246086, -0.02277320809662342, -0.09385232627391815, 0.09081430733203888, 0.08110442757606506, 0.04483775049448013, -0.059150781482458115, -0.008274640887975693, -0.05305038392543793, -0.06939893960952759, 0.0015986347571015358, -0.0723256915807724, 0.05236056074500084, 0.03952943906188011, 0.07337792962789536, -0.11007032543420792, -0.051623981446027756, 0.03896975889801979, 0.0512019544839859, -0.14661823213100433, -0.02371979132294655, 0.0650409534573555, -0.13016687333583832, 0.032728035002946854, -0.09866145998239517, 0.07303755730390549, -0.11829651147127151, -0.030073178932070732, -0.0011870964663103223, -0.053127143532037735, -0.06839416176080704, 0.023821182548999786, 0.0370786152780056, -0.04873069003224373, -0.040319010615348816, -0.004372105468064547, 0.06486406922340393, -0.10937634855508804, 0.06441789120435715, -0.11042331904172897, 0.18698807060718536, -0.12265697121620178, -0.05304263159632683, -0.10501279681921005, 0.1045045331120491, -0.17403367161750793, 0.006985052954405546, -0.18049579858779907, 0.16296279430389404, -0.10820581018924713, 0.0008202853496186435, -0.16064633429050446, -0.09123818576335907, -0.025518974289298058, 0.07248097658157349, 0.005031202454119921, -0.051814887672662735, 0.10464784502983093, 0.10950464755296707, -0.12763488292694092, 0.006093399599194527, 0.052151307463645935, 0.14057086408138275, 0.029917050153017044, 0.5113371014595032, -0.005773238372057676, -0.032167818397283554, -0.04791024699807167, 0.08887545019388199, -0.08699138462543488, -0.06530822068452835, 
0.14936411380767822, -0.04669041931629181, -0.06669055670499802, -0.003912182059139013, 0.00798267312347889, 0.05850083380937576, -0.08522389829158783, -0.0006025467882864177, -0.019505752250552177, -0.09833164513111115, 0.07302223891019821, -0.044856470078229904, 0.14866553246974945, -0.07762790471315384, 0.14861491322517395, -0.0051532224752008915, 0.05315791070461273, 0.047877173870801926, -0.06557276844978333, -0.010848157107830048, 0.08348732441663742, -0.1114581823348999, 0.004870629403740168, -0.18115513026714325, -0.11493467539548874, 0.07886268198490143, -0.01699104718863964, -0.00638518575578928, 0.06516644358634949, 0.0313112847507, -0.051628272980451584, 0.005724337417632341, 0.06674487143754959, 0.07344836741685867, 0.0700073093175888, -0.15561439096927643, -0.01771870069205761, 0.09690675884485245, -0.07869112491607666, 0.018762825056910515, -0.05430315062403679, 0.023058120161294937, 0.059282951056957245, -0.0029596297536045313, 0.08397168666124344, -0.007842774502933025, 0.09874818474054337, -0.0474141500890255, 0.02346094697713852, -0.0060281106270849705, 0.03885010629892349, -0.13310036063194275, -0.11454498022794724, -0.00001665133095229976, -0.03159913793206215, 0.17035523056983948, 0.06375601887702942, -0.05580395460128784, 0.005372924730181694, -0.05255065858364105, -0.03462313860654831, 0.013480247929692268, -0.14077647030353546, -0.11666209995746613, -0.036639586091041565, 0.06434091180562973, 0.042016394436359406, 0.025115936994552612, 0.0038805841468274593, 0.010499613359570503, -0.012705747969448566, 0.05909089371562004, 0.10644147545099258, 0.08330186456441879, -0.22670134902000427, 0.08762448281049728, 0.10833954066038132, 0.06259223818778992, 0.09415663033723831, 0.03322692587971687, -0.06064571440219879, 0.035262856632471085, 0.007240348029881716, 0.0013709634076803923, 0.08603901416063309, -0.2265183925628662, 0.05303393304347992, 0.1031196191906929, 0.0286395363509655, 0.046034667640924454, 0.009122079238295555, 
-0.02807924710214138, -0.017987394705414772, -0.007967269979417324, -0.10995720326900482, 0.05144355818629265, 0.07192615419626236, 0.15002834796905518, 0.08390768617391586, 0.02783886529505253, -0.040623988956213, 0.024809451773762703, -0.12266283482313156, 0.17381949722766876, -0.009623749181628227, -0.20247282087802887, -0.02593977563083172, 0.0262367594987154, 0.05791729688644409, 0.0375271812081337, -0.028646651655435562, -0.12916451692581177, 0.019288117066025734, -0.005462628323584795, 0.02926729805767536, 0.09207601100206375, 0.024746710434556007, 0.013610529713332653, 0.115613654255867, -0.0056486474350094795, 0.00932484120130539, -0.04521513357758522, -0.08408595621585846, 0.0564570426940918, 0.14848323166370392, -0.17871716618537903, 0.07128672301769257, 0.22135478258132935, -0.059326209127902985, -0.03674635291099548, -0.056857749819755554, 0.14422059059143066, -0.06685732305049896, 0.052580080926418304, 0.08309068530797958, 0.08587218821048737, -0.05006235092878342, 0.17187237739562988, 0.060203876346349716, -0.07215505093336105, 0.02160119079053402, 0.07876716554164886, -0.10697254538536072, -0.26736798882484436, -0.08840049803256989, -0.04971764609217644, 0.15267719328403473, 0.06546059250831604, 0.04132713004946709, 0.10471638292074203, 0.0773920789361, 0.14449384808540344, 0.1281028389930725, -0.0759710893034935, 0.01144410390406847, 0.07952596992254257, -0.06892181187868118, 0.09866566956043243, 0.014354133978486061, -0.08817584067583084, 0.04443832114338875, 0.13283446431159973, 0.2398686558008194, 0.18415142595767975, 0.1132478341460228, 0.01657783053815365, -0.14903825521469116, 0.09710821509361267, 0.1205405667424202, 0.10970208793878555, -0.02087610587477684, -0.05036766082048416, -0.06147289276123047, -0.02136547863483429, -0.011941651813685894, 0.05810873955488205, -0.05428150296211243, -0.012401970103383064, -0.05803779512643814, 0.1143713966012001, 0.03840358927845955, 0.1350158005952835, -0.08241687715053558, 0.06875564903020859, 
0.09566496312618256, -0.019453028216958046, -0.07241249829530716, 0.0701598972082138, 0.1889469176530838, -0.07965157181024551, -0.07735876739025116, -0.0073112668469548225, 0.10279161483049393, -0.029974112287163734, 0.12424670904874802, -0.09854848682880402, -0.30564120411872864, -0.0808442085981369, 0.11386474221944809, -0.006064603105187416, 0.24417506158351898, 0.02402835339307785, -0.12149852514266968, -0.11719180643558502, -0.13533204793930054, 0.04109940677881241, 0.19031549990177155, 0.16438859701156616, 0.08393929153680801, -0.04789695516228676, -0.1992485076189041, -0.166148841381073, -0.04472675547003746, 0.03921573609113693, -0.04906104877591133, 0.06658811867237091, 0.01596997305750847, 0.04635308310389519, -0.1217067539691925, -0.03732677921652794, -0.03412066027522087, 0.0902794674038887, 0.0008389758877456188, -0.0026751847472041845, -0.00621184753254056, -0.013204739429056644, 0.05037013441324234, 0.0637114942073822, 0.0334465391933918, -0.055513083934783936, -0.060023896396160126, -0.09716493636369705, 0.06764756888151169, 0.1207413300871849, -0.03805011138319969, -0.019571594893932343, -0.020205797627568245, -0.10671422630548477, -0.01883365772664547, -0.13897588849067688, 0.15989220142364502, -0.06323352456092834, -0.039009660482406616, -0.09250485897064209, 0.06224915757775307, -0.11667633056640625, 0.17117205262184143, -0.0020866324193775654, 0.05030025541782379, -0.035363152623176575, -0.014787480235099792, 0.19105596840381622, -0.004445730708539486, -0.04571707174181938, 0.240134134888649, -0.14117921888828278, -0.0973270833492279, -0.0299626924097538, 0.03842166066169739, 0.24231086671352386, 0.24277758598327637, -0.009757129475474358, 0.04987549036741257, 0.07449718564748764, -0.028661780059337616, -0.34421759843826294, 0.16047944128513336, -0.011430674232542515, -0.0187949538230896, 0.048425137996673584, -0.15374711155891418, 0.2370518445968628, 0.19957855343818665, -0.02161463536322117, 0.24109557271003723, -0.2047388255596161, 
-0.05382630601525307, 0.04201637953519821, -0.03804898262023926, 0.27060720324516296, -0.21110741794109344, -0.10297428071498871, -0.06210147961974144, -0.12765532732009888, 0.04491213709115982, -0.11817698925733566, 0.11779867112636566, -0.101518914103508, 0.019347740337252617, 0.06964358687400818, -0.054056908935308456, 0.1679501235485077, 0.11154863238334656, 0.03842892497777939, 0.028748830780386925, -0.23142151534557343, 0.1571972668170929, 0.00822226982563734, 0.022764181718230247, 0.129420205950737, -0.040925540030002594, -0.1882331669330597, 0.01241728849709034, -0.12216119468212128, 0.04756860062479973, 0.08353793621063232, -0.1316514015197754, -0.15829436480998993, 0.0182658638805151, 0.009576506912708282, 0.05976837873458862, 0.30707067251205444, -0.14676928520202637, 0.01767723262310028, -0.04841866344213486, 0.05954473465681076, -0.02949516288936138, -0.07910122722387314, 0.08019253611564636, -0.06404503434896469, 0.024526512250304222, -0.24659870564937592, 0.010455836541950703, 0.12772822380065918, 0.046293120831251144, 0.020790811628103256, 0.10163341462612152, 0.019590366631746292, 0.03891335800290108, 0.048010073602199554, -0.08724313974380493, 0.016466975212097168, 0.009504713118076324, -0.037161942571401596, -0.03386376053094864, -0.05928100645542145, 0.04054137319326401, 0.07189573347568512, -0.035443175584077835, -0.058666497468948364, 0.02949346974492073, -0.11710096150636673, 0.18997398018836975, 0.08202967047691345, 0.11612245440483093, -0.021230924874544144, -0.04282204806804657, 0.1619124859571457, -0.010495064780116081, -0.05845634639263153, 0.13247676193714142, -0.10739164054393768, -0.08631638437509537, -0.0193967055529356, 0.0903114378452301, 0.057511549443006516, 0.06591656804084778, -0.012680009007453918, -0.10163631290197372, 0.040918949991464615, 0.22560371458530426, -0.0029367555398494005, -0.06702074408531189, -0.002984610851854086, -0.06319303810596466, -0.0313231535255909, 0.08136910200119019, -0.25598984956741333, 
0.1030777171254158, -0.0177919901907444, -0.07181841880083084, -0.10312777012586594, 0.10335449129343033, -0.09512250870466232, 0.041944265365600586, -0.2269163578748703, 0.00282493163831532, -0.16835631430149078, 0.09669129550457001, -0.002958513330668211, -0.1185954138636589, 0.006806171964854002, 0.052140217274427414, 0.0029779267497360706, -0.0689888522028923, -0.09425155818462372, -0.023103684186935425, -0.02213273197412491, 0.10126078128814697, 0.04955478385090828, -0.07130881398916245, 0.07711688429117203, -0.024116091430187225, 0.11388013511896133, 0.04722127690911293, 0.05801330506801605, 0.0003796280943788588, 0.006757513154298067, -0.13359901309013367, 0.18920835852622986, 0.12301261723041534, 0.0167952049523592, -0.0778532549738884, -0.035579290241003036, -0.04859702289104462, 0.05587686598300934, 0.15854156017303467, -0.05306295305490494, -0.03697117418050766, -0.049601271748542786, -0.12917301058769226, -0.0875515565276146, 0.014990606345236301, -0.1103346198797226, 0.04823610559105873, -0.027099398896098137, 0.03232285752892494, 0.007560265250504017, 0.015360563062131405, -0.13281144201755524, -0.023428384214639664, -0.05900156870484352, -0.1459951549768448, 0.011396575719118118, 0.006120767444372177, 0.0975574478507042, 0.015417126938700676, 0.26962000131607056, 0.03277378901839256, -0.13899724185466766, 0.05513179674744606, 0.18194912374019623, 0.17967592179775238, 0.0399477556347847, 0.3241240978240967, 0.1452806144952774, -0.10133417695760727, -0.01638903096318245, 0.007865766994655132, 0.09885278344154358, 0.04696734622120857, 0.19815638661384583, 0.21324661374092102, -0.11206180602312088, -0.062372561544179916, -0.009283974766731262, -0.013725392520427704, -0.06014943867921829, -0.03860223665833473, 0.08797411620616913, -0.0411340594291687, 0.04316768795251846, 0.00944027490913868, 0.12310025840997696, -0.15559987723827362, 0.0925770178437233, -0.11558768898248672, -0.03329894691705704, -0.05092226713895798, 0.031125951558351517, 
-0.0648554190993309, -0.06446526944637299, -0.0430959053337574, -0.09071166813373566, -0.08567045629024506, 0.22049519419670105, 0.010049846023321152, 0.12520571053028107, 0.19526831805706024, -0.2164727747440338, 0.057959575206041336, 0.015752607956528664, 0.016537394374608994, -0.06556059420108795, -0.13947656750679016, -0.030838221311569214, 0.04261184483766556, -0.0763116106390953, -0.009146347641944885, -0.0195450596511364, -0.12117709964513779, -0.09180193394422531, -0.12326209992170334, -0.08699729293584824, -0.028413332998752594, -0.05307294800877571, -0.19216831028461456, 0.06966570764780045, 0.0762210264801979, -0.10709823668003082, -0.009199929423630238, 0.21690379083156586, 0.008888334967195988, -0.0900919958949089, -0.0019584072288125753, 0.18416336178779602, -0.015602979809045792, 0.06901753693819046, -0.09618207067251205, -0.10946144163608551, -0.015833595767617226, 0.23110176622867584, 0.1447703093290329, -0.0837630033493042, -0.0030498981941491365, -0.002775115193799138, 0.016392167657613754, 0.07736723870038986, 0.10568308085203171, 0.0294676311314106, 0.07153522968292236, -0.047846417874097824, -0.012350926175713539, 0.019595475867390633, -0.09724830090999603, -0.2527076005935669, -0.04380790516734123, 0.15591812133789062, -0.002275610575452447, -0.1326061189174652, 0.06468839198350906, -0.10411272197961807, 0.22602081298828125, 0.11453062295913696, -0.17882703244686127, -0.053525857627391815, -0.11780168861150742, -0.12279555946588516, -0.08770149201154709, 0.027026301249861717, -0.08970041573047638, -0.03829335793852806, -0.1752849519252777, 0.00747827161103487, -0.2676019072532654, -0.06815265119075775, 0.051068760454654694, 0.04222187399864197, 0.14773456752300262, -0.00854822713881731, 0.08545815944671631, 0.04207441210746765, -0.10040275007486343, -0.0435626246035099, 0.10802225023508072, -0.009157123044133186, 0.04626893252134323, -0.03964017331600189, 0.16638602316379547, -0.010081112384796143, -0.018456656485795975, 0.07953447103500366, 
-0.1916263997554779, -0.09317836165428162, 0.13054321706295013, 0.030281418934464455, -0.09281744807958603, -0.04570736736059189, -0.040321893990039825, 0.033325690776109695, 0.05892430990934372, 0.003395460080355406, 0.0429726205766201, -0.08651977777481079, -0.0003660069196484983, 0.07216564565896988, -0.029649844393134117, -0.17508728802204132, -0.09671329706907272, 0.004131820052862167, -0.06090554967522621, 0.012257876805961132, -0.3624148368835449, -0.05451919510960579, -0.11735428124666214, 0.07305483520030975, -0.030321264639496803, -0.025620967149734497, 0.0788007602095604, -0.06739693135023117, -0.022672632709145546, -0.3406965136528015, 0.0446820966899395, 0.15343886613845825, -0.08648160845041275, -0.09428948163986206 ]
6d37c66c72d4d69cfe13a377ea41bd76014ef156
# Dataset Card for Evaluation run of sethuiyer/Nandine-7b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [sethuiyer/Nandine-7b](https://huggingface.co/sethuiyer/Nandine-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_sethuiyer__Nandine-7b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T14:10:10.967480](https://huggingface.co/datasets/open-llm-leaderboard/details_sethuiyer__Nandine-7b/blob/main/results_2024-01-25T14-10-10.967480.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6518690929176946, "acc_stderr": 0.03205655779902747, "acc_norm": 0.6527904357270478, "acc_norm_stderr": 0.032712263430470095, "mc1": 0.46511627906976744, "mc1_stderr": 0.017460849975873965, "mc2": 0.621045026787777, "mc2_stderr": 0.015400019607552454 }, "harness|arc:challenge|25": { "acc": 0.6638225255972696, "acc_stderr": 0.01380485502620576, "acc_norm": 0.6928327645051194, "acc_norm_stderr": 0.013481034054980941 }, "harness|hellaswag|10": { "acc": 0.6926906990639314, "acc_stderr": 0.004604357610190314, "acc_norm": 0.8701453893646683, "acc_norm_stderr": 0.003354564257491871 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6222222222222222, "acc_stderr": 0.04188307537595852, "acc_norm": 0.6222222222222222, "acc_norm_stderr": 0.04188307537595852 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6842105263157895, "acc_stderr": 0.0378272898086547, "acc_norm": 0.6842105263157895, "acc_norm_stderr": 0.0378272898086547 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7094339622641509, "acc_stderr": 0.027943219989337142, "acc_norm": 0.7094339622641509, "acc_norm_stderr": 0.027943219989337142 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.56, "acc_stderr": 0.049888765156985884, "acc_norm": 0.56, "acc_norm_stderr": 
0.049888765156985884 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6589595375722543, "acc_stderr": 0.036146654241808254, "acc_norm": 0.6589595375722543, "acc_norm_stderr": 0.036146654241808254 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.04897104952726367, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.04897104952726367 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.8, "acc_stderr": 0.04020151261036845, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5914893617021276, "acc_stderr": 0.032134180267015755, "acc_norm": 0.5914893617021276, "acc_norm_stderr": 0.032134180267015755 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5862068965517241, "acc_stderr": 0.04104269211806232, "acc_norm": 0.5862068965517241, "acc_norm_stderr": 0.04104269211806232 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4021164021164021, "acc_stderr": 0.025253032554997692, "acc_norm": 0.4021164021164021, "acc_norm_stderr": 0.025253032554997692 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677172, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7677419354838709, "acc_stderr": 0.024022256130308235, "acc_norm": 0.7677419354838709, "acc_norm_stderr": 0.024022256130308235 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 
0.5221674876847291, "acc_stderr": 0.03514528562175007, "acc_norm": 0.5221674876847291, "acc_norm_stderr": 0.03514528562175007 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.67, "acc_stderr": 0.04725815626252607, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252607 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8181818181818182, "acc_stderr": 0.027479603010538804, "acc_norm": 0.8181818181818182, "acc_norm_stderr": 0.027479603010538804 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6692307692307692, "acc_stderr": 0.023854795680971118, "acc_norm": 0.6692307692307692, "acc_norm_stderr": 0.023854795680971118 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34814814814814815, "acc_stderr": 0.029045600290616255, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.029045600290616255 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7142857142857143, "acc_stderr": 0.02934457250063433, "acc_norm": 0.7142857142857143, "acc_norm_stderr": 0.02934457250063433 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8348623853211009, "acc_stderr": 0.015919557829976044, "acc_norm": 0.8348623853211009, "acc_norm_stderr": 0.015919557829976044 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5138888888888888, "acc_stderr": 0.03408655867977749, "acc_norm": 0.5138888888888888, 
"acc_norm_stderr": 0.03408655867977749 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8284313725490197, "acc_stderr": 0.026460569561240644, "acc_norm": 0.8284313725490197, "acc_norm_stderr": 0.026460569561240644 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.810126582278481, "acc_stderr": 0.02553010046023349, "acc_norm": 0.810126582278481, "acc_norm_stderr": 0.02553010046023349 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.672645739910314, "acc_stderr": 0.03149384670994131, "acc_norm": 0.672645739910314, "acc_norm_stderr": 0.03149384670994131 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098824, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098824 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7962962962962963, "acc_stderr": 0.03893542518824847, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7852760736196319, "acc_stderr": 0.032262193772867744, "acc_norm": 0.7852760736196319, "acc_norm_stderr": 0.032262193772867744 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.45535714285714285, "acc_stderr": 0.047268355537191, "acc_norm": 0.45535714285714285, "acc_norm_stderr": 0.047268355537191 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8760683760683761, "acc_stderr": 0.021586494001281365, "acc_norm": 0.8760683760683761, "acc_norm_stderr": 0.021586494001281365 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 
0.04408440022768078 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.822477650063857, "acc_stderr": 0.013664230995834832, "acc_norm": 0.822477650063857, "acc_norm_stderr": 0.013664230995834832 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7254335260115607, "acc_stderr": 0.02402774515526502, "acc_norm": 0.7254335260115607, "acc_norm_stderr": 0.02402774515526502 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4480446927374302, "acc_stderr": 0.016631976628930595, "acc_norm": 0.4480446927374302, "acc_norm_stderr": 0.016631976628930595 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7189542483660131, "acc_stderr": 0.025738854797818733, "acc_norm": 0.7189542483660131, "acc_norm_stderr": 0.025738854797818733 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7041800643086816, "acc_stderr": 0.025922371788818767, "acc_norm": 0.7041800643086816, "acc_norm_stderr": 0.025922371788818767 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7407407407407407, "acc_stderr": 0.02438366553103545, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.02438366553103545 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4645390070921986, "acc_stderr": 0.029752389657427047, "acc_norm": 0.4645390070921986, "acc_norm_stderr": 0.029752389657427047 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46936114732724904, "acc_stderr": 0.012746237711716634, "acc_norm": 0.46936114732724904, "acc_norm_stderr": 0.012746237711716634 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6617647058823529, "acc_stderr": 0.028739328513983572, "acc_norm": 0.6617647058823529, "acc_norm_stderr": 0.028739328513983572 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6633986928104575, "acc_stderr": 0.019117213911495148, "acc_norm": 0.6633986928104575, "acc_norm_stderr": 0.019117213911495148 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7, "acc_stderr": 0.04389311454644287, "acc_norm": 0.7, "acc_norm_stderr": 
0.04389311454644287 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7551020408163265, "acc_stderr": 0.027529637440174923, "acc_norm": 0.7551020408163265, "acc_norm_stderr": 0.027529637440174923 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.02587064676616914, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.02587064676616914 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.82, "acc_stderr": 0.038612291966536934, "acc_norm": 0.82, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.038823108508905954, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.038823108508905954 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8538011695906432, "acc_stderr": 0.027097290118070806, "acc_norm": 0.8538011695906432, "acc_norm_stderr": 0.027097290118070806 }, "harness|truthfulqa:mc|0": { "mc1": 0.46511627906976744, "mc1_stderr": 0.017460849975873965, "mc2": 0.621045026787777, "mc2_stderr": 0.015400019607552454 }, "harness|winogrande|5": { "acc": 0.8318863456985004, "acc_stderr": 0.010510336954166746 }, "harness|gsm8k|5": { "acc": 0.6239575435936315, "acc_stderr": 0.013342532064849772 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_sethuiyer__Nandine-7b
[ "region:us" ]
2024-01-25T14:12:29+00:00
{"pretty_name": "Evaluation run of sethuiyer/Nandine-7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [sethuiyer/Nandine-7b](https://huggingface.co/sethuiyer/Nandine-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_sethuiyer__Nandine-7b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T14:10:10.967480](https://huggingface.co/datasets/open-llm-leaderboard/details_sethuiyer__Nandine-7b/blob/main/results_2024-01-25T14-10-10.967480.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6518690929176946,\n \"acc_stderr\": 0.03205655779902747,\n \"acc_norm\": 0.6527904357270478,\n \"acc_norm_stderr\": 0.032712263430470095,\n \"mc1\": 0.46511627906976744,\n \"mc1_stderr\": 0.017460849975873965,\n \"mc2\": 0.621045026787777,\n \"mc2_stderr\": 0.015400019607552454\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6638225255972696,\n \"acc_stderr\": 0.01380485502620576,\n \"acc_norm\": 0.6928327645051194,\n \"acc_norm_stderr\": 0.013481034054980941\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6926906990639314,\n \"acc_stderr\": 0.004604357610190314,\n \"acc_norm\": 0.8701453893646683,\n \"acc_norm_stderr\": 0.003354564257491871\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252606,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252606\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6222222222222222,\n \"acc_stderr\": 0.04188307537595852,\n \"acc_norm\": 0.6222222222222222,\n \"acc_norm_stderr\": 0.04188307537595852\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6842105263157895,\n \"acc_stderr\": 0.0378272898086547,\n \"acc_norm\": 0.6842105263157895,\n \"acc_norm_stderr\": 0.0378272898086547\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7094339622641509,\n \"acc_stderr\": 0.027943219989337142,\n \"acc_norm\": 0.7094339622641509,\n \"acc_norm_stderr\": 0.027943219989337142\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n 
\"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.049888765156985884,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.049888765156985884\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6589595375722543,\n \"acc_stderr\": 0.036146654241808254,\n \"acc_norm\": 0.6589595375722543,\n \"acc_norm_stderr\": 0.036146654241808254\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.04897104952726367,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.04897104952726367\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5914893617021276,\n \"acc_stderr\": 0.032134180267015755,\n \"acc_norm\": 0.5914893617021276,\n \"acc_norm_stderr\": 0.032134180267015755\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5862068965517241,\n \"acc_stderr\": 0.04104269211806232,\n \"acc_norm\": 0.5862068965517241,\n \"acc_norm_stderr\": 0.04104269211806232\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4021164021164021,\n \"acc_stderr\": 0.025253032554997692,\n \"acc_norm\": 0.4021164021164021,\n \"acc_norm_stderr\": 0.025253032554997692\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.04463112720677172,\n \"acc_norm\": 
0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7677419354838709,\n \"acc_stderr\": 0.024022256130308235,\n \"acc_norm\": 0.7677419354838709,\n \"acc_norm_stderr\": 0.024022256130308235\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5221674876847291,\n \"acc_stderr\": 0.03514528562175007,\n \"acc_norm\": 0.5221674876847291,\n \"acc_norm_stderr\": 0.03514528562175007\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252607,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252607\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8181818181818182,\n \"acc_stderr\": 0.027479603010538804,\n \"acc_norm\": 0.8181818181818182,\n \"acc_norm_stderr\": 0.027479603010538804\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6692307692307692,\n \"acc_stderr\": 0.023854795680971118,\n \"acc_norm\": 0.6692307692307692,\n \"acc_norm_stderr\": 0.023854795680971118\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34814814814814815,\n \"acc_stderr\": 0.029045600290616255,\n \"acc_norm\": 0.34814814814814815,\n \"acc_norm_stderr\": 0.029045600290616255\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n 
\"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.02934457250063433,\n \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.02934457250063433\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8348623853211009,\n \"acc_stderr\": 0.015919557829976044,\n \"acc_norm\": 0.8348623853211009,\n \"acc_norm_stderr\": 0.015919557829976044\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5138888888888888,\n \"acc_stderr\": 0.03408655867977749,\n \"acc_norm\": 0.5138888888888888,\n \"acc_norm_stderr\": 0.03408655867977749\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8284313725490197,\n \"acc_stderr\": 0.026460569561240644,\n \"acc_norm\": 0.8284313725490197,\n \"acc_norm_stderr\": 0.026460569561240644\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.810126582278481,\n \"acc_stderr\": 0.02553010046023349,\n \"acc_norm\": 0.810126582278481,\n \"acc_norm_stderr\": 0.02553010046023349\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.672645739910314,\n \"acc_stderr\": 0.03149384670994131,\n \"acc_norm\": 0.672645739910314,\n \"acc_norm_stderr\": 0.03149384670994131\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 
0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7852760736196319,\n \"acc_stderr\": 0.032262193772867744,\n \"acc_norm\": 0.7852760736196319,\n \"acc_norm_stderr\": 0.032262193772867744\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n \"acc_stderr\": 0.047268355537191,\n \"acc_norm\": 0.45535714285714285,\n \"acc_norm_stderr\": 0.047268355537191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n \"acc_stderr\": 0.021586494001281365,\n \"acc_norm\": 0.8760683760683761,\n \"acc_norm_stderr\": 0.021586494001281365\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.822477650063857,\n \"acc_stderr\": 0.013664230995834832,\n \"acc_norm\": 0.822477650063857,\n \"acc_norm_stderr\": 0.013664230995834832\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7254335260115607,\n \"acc_stderr\": 0.02402774515526502,\n \"acc_norm\": 0.7254335260115607,\n \"acc_norm_stderr\": 0.02402774515526502\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4480446927374302,\n \"acc_stderr\": 0.016631976628930595,\n \"acc_norm\": 0.4480446927374302,\n \"acc_norm_stderr\": 0.016631976628930595\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7189542483660131,\n \"acc_stderr\": 0.025738854797818733,\n \"acc_norm\": 0.7189542483660131,\n \"acc_norm_stderr\": 0.025738854797818733\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7041800643086816,\n \"acc_stderr\": 0.025922371788818767,\n \"acc_norm\": 0.7041800643086816,\n \"acc_norm_stderr\": 
0.025922371788818767\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.02438366553103545,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.02438366553103545\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4645390070921986,\n \"acc_stderr\": 0.029752389657427047,\n \"acc_norm\": 0.4645390070921986,\n \"acc_norm_stderr\": 0.029752389657427047\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46936114732724904,\n \"acc_stderr\": 0.012746237711716634,\n \"acc_norm\": 0.46936114732724904,\n \"acc_norm_stderr\": 0.012746237711716634\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6617647058823529,\n \"acc_stderr\": 0.028739328513983572,\n \"acc_norm\": 0.6617647058823529,\n \"acc_norm_stderr\": 0.028739328513983572\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6633986928104575,\n \"acc_stderr\": 0.019117213911495148,\n \"acc_norm\": 0.6633986928104575,\n \"acc_norm_stderr\": 0.019117213911495148\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.04389311454644287,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.04389311454644287\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7551020408163265,\n \"acc_stderr\": 0.027529637440174923,\n \"acc_norm\": 0.7551020408163265,\n \"acc_norm_stderr\": 0.027529637440174923\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.02587064676616914,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.02587064676616914\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.82,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.038823108508905954,\n \"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 
0.038823108508905954\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8538011695906432,\n \"acc_stderr\": 0.027097290118070806,\n \"acc_norm\": 0.8538011695906432,\n \"acc_norm_stderr\": 0.027097290118070806\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.46511627906976744,\n \"mc1_stderr\": 0.017460849975873965,\n \"mc2\": 0.621045026787777,\n \"mc2_stderr\": 0.015400019607552454\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8318863456985004,\n \"acc_stderr\": 0.010510336954166746\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6239575435936315,\n \"acc_stderr\": 0.013342532064849772\n }\n}\n```", "repo_url": "https://huggingface.co/sethuiyer/Nandine-7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|arc:challenge|25_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|gsm8k|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hellaswag|10_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-10-10.967480.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-10-10.967480.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-10-10.967480.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-10-10.967480.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-10-10.967480.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-10-10.967480.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T14-10-10.967480.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T14-10-10.967480.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["**/details_harness|winogrande|5_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T14-10-10.967480.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T14_10_10.967480", "path": ["results_2024-01-25T14-10-10.967480.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T14-10-10.967480.parquet"]}]}]}
2024-01-25T14:12:53+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of sethuiyer/Nandine-7b Dataset automatically created during the evaluation run of model sethuiyer/Nandine-7b on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T14:10:10.967480 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of sethuiyer/Nandine-7b\n\n\n\nDataset automatically created during the evaluation run of model sethuiyer/Nandine-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T14:10:10.967480(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of sethuiyer/Nandine-7b\n\n\n\nDataset automatically created during the evaluation run of model sethuiyer/Nandine-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T14:10:10.967480(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 179, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of sethuiyer/Nandine-7b\n\n\n\nDataset automatically created during the evaluation run of model sethuiyer/Nandine-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T14:10:10.967480(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.05137113854289055, 0.19114060699939728, -0.006741493474692106, 0.03931530565023422, 0.06841317564249039, -0.007851445116102695, 0.03728987276554108, 0.10553747415542603, 0.03307691961526871, 0.16863355040550232, -0.008931556716561317, 0.10651696473360062, 0.09163646399974823, 0.10844913870096207, 0.03344818949699402, -0.13852444291114807, 0.047268033027648926, -0.09871771931648254, 0.10645130276679993, 0.06675992906093597, 0.05648062378168106, -0.08456390351057053, 0.06428257375955582, -0.027973774820566177, 0.04032202810049057, -0.006504212040454149, -0.05647042766213417, -0.0402069166302681, 0.11167421191930771, 0.08684871345758438, 0.042438264936208725, -0.021578054875135422, 0.0393802709877491, -0.2773372530937195, 0.018806688487529755, 0.10514264553785324, 0.003425669390708208, 0.04683665186166763, 0.14437060058116913, -0.07913073152303696, 0.08664906769990921, -0.030749743804335594, 0.06639295071363449, 0.05199402943253517, -0.11593550443649292, -0.15342538058757782, -0.14287470281124115, 0.009988819248974323, 0.06879711896181107, 0.04794719070196152, -0.020239606499671936, 0.1128382682800293, -0.045902449637651443, 0.04846698045730591, 0.1417815387248993, -0.17949596047401428, -0.023535296320915222, 0.046321481466293335, 0.020832164213061333, 0.0662878230214119, -0.08928928524255753, -0.023223672062158585, 0.026508206501603127, 0.05361093580722809, -0.0026756669394671917, 0.007845881395041943, 0.0368574783205986, 0.02256181836128235, -0.14516174793243408, -0.12582756578922272, 0.1150519996881485, -0.004468114580959082, -0.045242272317409515, -0.16438861191272736, -0.04957197606563568, 0.005917876027524471, 0.007207100745290518, 0.020951390266418457, 0.008531319908797741, 0.0017857163911685348, 0.07804892212152481, -0.008468426764011383, -0.09659188240766525, -0.032079845666885376, -0.03925231844186783, 0.024353517219424248, 0.032804954797029495, 0.005095369182527065, 0.00401340052485466, 0.1327662318944931, 0.03744642063975334, -0.05364939570426941, 
-0.08308468014001846, -0.050509676337242126, -0.13673628866672516, -0.03453824669122696, 0.026467658579349518, -0.07711483538150787, 0.047316648066043854, 0.23624466359615326, -0.03661440685391426, 0.028393518179655075, -0.101615771651268, 0.01851639151573181, 0.11872278153896332, 0.09435487538576126, -0.07789400964975357, -0.07155659794807434, -0.0513625405728817, 0.01755458675324917, 0.027149077504873276, -0.02522490918636322, 0.027052324265241623, 0.0656963363289833, 0.008639860898256302, 0.1312534511089325, 0.11754877865314484, 0.02725885435938835, -0.07049956172704697, -0.011469596065580845, 0.16091743111610413, -0.1658441722393036, 0.004361310973763466, 0.025401175022125244, -0.025699790567159653, -0.07945225387811661, 0.06224233657121658, -0.01728968694806099, -0.06055397912859917, 0.10575075447559357, -0.060705214738845825, -0.0695224404335022, -0.09759319573640823, -0.06858989596366882, 0.03550063818693161, -0.005764234811067581, -0.06332487612962723, -0.06497693806886673, -0.1339276134967804, -0.07763753831386566, 0.030946264043450356, -0.0651305615901947, -0.002261987654492259, 0.002286164788529277, 0.015888754278421402, -0.012405629269778728, -0.011896207928657532, 0.10769250243902206, -0.07140861451625824, 0.03445903956890106, -0.048020198941230774, 0.034024447202682495, 0.1074954941868782, 0.02449144423007965, -0.11813440918922424, 0.09227779507637024, -0.10053239017724991, 0.09587568044662476, -0.09404201060533524, -0.025436921045184135, -0.11693789809942245, 0.0149283017963171, -0.022298917174339294, 0.02555539645254612, -0.017252575606107712, 0.08073578029870987, -0.1991943120956421, -0.0048893108032643795, 0.17893417179584503, -0.12016892433166504, -0.05815088003873825, 0.08354280889034271, -0.0257281344383955, 0.05754944682121277, 0.037469975650310516, 0.08299901336431503, 0.08252806961536407, -0.0835314616560936, -0.11218880861997604, -0.04915254935622215, -0.044029440730810165, 0.1491105854511261, 0.053152382373809814, -0.08898208290338516, 
0.09594880044460297, 0.03542652353644371, 0.009247878566384315, -0.04820410907268524, -0.008861475624144077, -0.059724219143390656, -0.002304191468283534, -0.037027012556791306, -0.07210271805524826, -0.02383572980761528, -0.08533269912004471, 0.0029404684901237488, -0.056981366127729416, -0.009863813407719135, 0.0986332818865776, -0.020752403885126114, 0.029418103396892548, -0.08655411750078201, 0.05880492925643921, -0.0096216956153512, 0.017682557925581932, -0.21112078428268433, -0.08655498176813126, 0.03670646622776985, -0.1834573894739151, 0.0402897484600544, 0.007772685028612614, 0.020384307950735092, 0.05650303140282631, 0.0006499683950096369, 0.010713006369769573, 0.025676600635051727, -0.0127444202080369, -0.004533978179097176, -0.15073344111442566, -0.04006020724773407, -0.07307878136634827, 0.07626408338546753, -0.11744203418493271, -0.021161705255508423, 0.0622260756790638, 0.15102271735668182, 0.018484987318515778, -0.07668689638376236, 0.04457404091954231, 0.015698295086622238, -0.05586603283882141, -0.05139045789837837, 0.0016018410678952932, -0.008937218226492405, 0.03182513639330864, 0.07466734200716019, -0.18740329146385193, -0.14633327722549438, 0.06827988475561142, 0.12491603195667267, -0.07309898734092712, -0.0660172700881958, -0.07018180191516876, -0.05846790224313736, -0.10141171514987946, -0.05486704036593437, 0.09431558847427368, 0.09308727085590363, 0.05105862393975258, -0.06522797048091888, -0.04851029813289642, -0.004333436954766512, 0.039881885051727295, -0.06902492046356201, 0.10786569118499756, 0.10128144919872284, -0.07932618260383606, 0.09880217909812927, -0.0209586750715971, 0.1074296087026596, 0.10888247191905975, 0.008032175712287426, -0.1187472864985466, -0.011317659169435501, 0.06938279420137405, 0.048832058906555176, 0.08292655646800995, -0.007245495915412903, 0.04244919866323471, 0.0811547040939331, -0.002555728191509843, 0.03319568559527397, -0.07494927942752838, 0.03449643775820732, 0.02523101679980755, -0.00898677110671997, 
0.010587531141936779, 0.0016741203144192696, 0.028200164437294006, 0.08900760859251022, 0.007168504409492016, 0.07454274594783783, -0.030677275732159615, -0.04851287603378296, -0.08732020854949951, 0.1395869106054306, -0.09397479891777039, -0.2344324141740799, -0.1756853312253952, -0.0384269542992115, -0.03515637293457985, -0.014177365228533745, 0.04785815253853798, 0.0077248006127774715, -0.10314367711544037, -0.1131829172372818, 0.04485519602894783, 0.03931073844432831, -0.1185070350766182, -0.0346963033080101, 0.028689367696642876, -0.005702786613255739, -0.1630670130252838, 0.02784748002886772, 0.037558864802122116, -0.07018663734197617, 0.022760804742574692, 0.0873146578669548, 0.12195838242769241, 0.09471169114112854, 0.09120601415634155, -0.02064315415918827, -0.015000179409980774, 0.15442483127117157, -0.11208919435739517, 0.03770998865365982, 0.09108180552721024, -0.03774372115731239, 0.07855070382356644, 0.14322030544281006, 0.005810554604977369, -0.07652035355567932, 0.043105822056531906, 0.10053286701440811, -0.06215858832001686, -0.2570033073425293, -0.0819094255566597, -0.030284084379673004, 0.06591211259365082, 0.09703030437231064, 0.07741869986057281, -0.013282348401844501, -0.002732823835685849, -0.10234037786722183, -0.044497858732938766, -0.028706369921565056, 0.06184576079249382, 0.0386444516479969, -0.01418819185346365, 0.04657643288373947, -0.05496012419462204, 0.020884547382593155, 0.12466108798980713, 0.03572004660964012, 0.17679567635059357, -0.04377734288573265, 0.18253406882286072, 0.09286957234144211, 0.08519420772790909, -0.02330712229013443, 0.07395195215940475, -0.012311073951423168, 0.07126441597938538, -0.012074473313987255, -0.09223784506320953, -0.02684818021953106, 0.0982433557510376, 0.04728925600647926, -0.04303903877735138, 0.05781744047999382, -0.051302049309015274, 0.06235742196440697, 0.25867190957069397, 0.0004118932120036334, -0.13168896734714508, -0.03287132456898689, 0.04735994338989258, -0.04711375758051872, 
-0.09809640049934387, 0.009554190561175346, 0.08608078956604004, -0.15351733565330505, 0.025267401710152626, -0.040654417127370834, 0.06860882043838501, -0.14187933504581451, -0.027988309040665627, -0.027069034054875374, 0.05655720829963684, -0.02365429885685444, 0.10221116989850998, -0.1512831300497055, 0.09259861707687378, -0.012095771729946136, 0.021524062380194664, -0.06481506675481796, 0.0711200162768364, -0.013456475920975208, -0.06508607417345047, 0.15458154678344727, -0.0018848812906071544, -0.11959408223628998, -0.06585992127656937, -0.12409065663814545, -0.013645457103848457, 0.041846681386232376, -0.0986647978425026, 0.11747312545776367, 0.012183918617665768, -0.02523987926542759, -0.043297991156578064, -0.009740296751260757, -0.075111523270607, -0.22089789807796478, 0.08817165344953537, -0.12327000498771667, 0.047204844653606415, -0.05368761718273163, -0.03559305891394615, -0.05276508629322052, 0.1375385820865631, -0.12708619236946106, -0.060780201107263565, -0.10677815228700638, -0.04177412763237953, 0.15074020624160767, -0.06585339456796646, 0.04990202188491821, -0.04748282581567764, 0.16230401396751404, -0.036835066974163055, -0.049447331577539444, 0.011739655397832394, -0.07067052274942398, -0.17028942704200745, -0.04544832557439804, 0.10393918305635452, 0.06276251375675201, 0.010566585697233677, -0.011431731283664703, 0.04887403920292854, 0.013463703915476799, -0.09386314451694489, 0.04664017632603645, 0.13412274420261383, 0.10684262961149216, 0.06026804447174072, -0.03976736217737198, -0.08986611664295197, -0.09487006068229675, -0.09028752893209457, 0.06289972364902496, 0.184724360704422, -0.0620054192841053, 0.14283916354179382, 0.139526829123497, -0.11864499002695084, -0.2053249329328537, -0.09566394239664078, -0.017005501314997673, -0.0160637516528368, 0.11208567768335342, -0.2039881944656372, 0.045114580541849136, 0.08799292892217636, -0.027550959959626198, 0.11174842715263367, -0.2641119360923767, -0.1326146125793457, 0.04735513776540756, 
0.036711543798446655, -0.1650640219449997, -0.1384621113538742, -0.08725064992904663, -0.006056012120097876, -0.11731594055891037, 0.10675913095474243, -0.009762638248503208, 0.04212195426225662, -0.018206607550382614, 0.0541481077671051, 0.041576337069272995, -0.071173757314682, 0.12635855376720428, -0.019690679386258125, 0.033877331763505936, -0.08711899071931839, -0.009328631684184074, -0.010105997323989868, -0.04588537663221359, 0.07858698070049286, 0.01988949626684189, 0.03492254018783569, -0.0649334266781807, -0.03680439665913582, -0.050875745713710785, 0.034352775663137436, -0.06139601767063141, -0.061834946274757385, -0.05198642611503601, 0.08213845640420914, 0.07903923839330673, -0.011210653930902481, 0.0312229935079813, -0.043968960642814636, 0.04268697276711464, 0.22220958769321442, 0.07177068293094635, 0.044542234390974045, -0.11167175322771072, -0.04675518348813057, -0.015218395739793777, 0.003750938456505537, -0.078976571559906, 0.05256142094731331, 0.09203750640153885, 0.03283068910241127, 0.10719235241413116, -0.010475878603756428, -0.20010414719581604, 0.0023193249944597483, 0.07745421677827835, -0.10855769366025925, -0.21441499888896942, 0.04559595510363579, 0.05063549429178238, -0.1138937771320343, -0.08369031548500061, 0.09293860197067261, 0.030774150043725967, -0.01624342054128647, 0.009685124270617962, 0.07834553718566895, 0.04481342062354088, 0.0822458416223526, -0.027819007635116577, 0.04078489914536476, -0.06686390936374664, 0.12207230180501938, 0.14794579148292542, -0.11168906837701797, -0.002471120795235038, 0.05984581261873245, -0.04731651023030281, -0.0597001276910305, -0.0428771935403347, 0.045616406947374344, -0.006195694208145142, -0.04041631147265434, -0.013567672111093998, -0.05573222413659096, 0.0736333355307579, 0.13633449375629425, -0.013579348102211952, 0.070701964199543, 0.020045937970280647, -0.004185672849416733, -0.04700053110718727, 0.11092446744441986, 0.028287826105952263, 0.04130461812019348, -0.03138447925448418, 
0.03070397861301899, 0.015876686200499535, -0.017824312672019005, 0.019122391939163208, -0.06522135436534882, -0.060807112604379654, 0.01664980687201023, -0.17839384078979492, 0.033730797469615936, -0.08179651200771332, -0.012504374608397484, 0.009083034470677376, 0.00865339394658804, 0.003428853116929531, 0.009529845789074898, -0.05618458241224289, -0.051561880856752396, -0.04526741802692413, 0.12328237295150757, -0.20311428606510162, -0.007474712561815977, 0.08314166963100433, -0.08177987486124039, 0.07285196334123611, 0.002374230418354273, -0.012872731313109398, 0.014907784759998322, -0.07464668154716492, -0.008667714893817902, -0.023702412843704224, 0.05832167714834213, 0.010958651080727577, -0.1348426342010498, -0.016681993380188942, -0.003420184599235654, -0.09164115786552429, -0.003950857557356358, 0.0004332349926698953, -0.14144669473171234, 0.08619020879268646, 0.09814302623271942, -0.041939426213502884, -0.03858897089958191, 0.023740574717521667, 0.030954910442233086, 0.017885081470012665, 0.09453102946281433, -0.027703907340765, 0.033194400370121, -0.15141484141349792, -0.034133899956941605, 0.008060934022068977, 0.009061170741915703, 0.058511391282081604, -0.0006312985206022859, 0.028535732999444008, -0.014243333600461483, 0.2248874008655548, -0.0272710919380188, 0.005468146875500679, 0.023378735408186913, -0.018503230065107346, -0.04884467273950577, 0.029114719480276108, -0.022685857489705086, 0.009387647733092308, 0.014098004437983036, 0.010261460207402706, -0.026664987206459045, -0.05305691063404083, 0.016593052074313164, 0.09306897968053818, 0.10931754857301712, 0.2170015424489975, -0.03222639486193657, 0.04778994247317314, -0.14777761697769165, -0.06451506912708282, -0.0060084061697125435, -0.08647789806127548, 0.05357165262103081, -0.06077692657709122, 0.05619153007864952, 0.11303335428237915, -0.11947716772556305, 0.14224007725715637, -0.04180692136287689, -0.023924468085169792, -0.05733974650502205, -0.18905948102474213, -0.03287208080291748, 
0.02952573634684086, 0.00542198121547699, -0.08429916948080063, 0.111164391040802, 0.12445931881666183, 0.011378107592463493, -0.000023699423763900995, 0.06883569061756134, -0.08332779258489609, -0.05113235116004944, -0.03451403230428696, 0.02200275845825672, 0.029063310474157333, 0.01590614952147007, 0.059791646897792816, 0.004284125752747059, 0.04816436767578125, 0.07304155081510544, 0.09789437055587769, 0.05601532757282257, 0.04580041393637657, -0.027547890320420265, -0.039664894342422485, -0.002771863015368581, -0.02622929774224758, -0.06910399347543716, 0.1723375916481018, 0.07216932624578476, 0.02409917116165161, 0.02060159109532833, 0.19823957979679108, -0.019549956545233727, -0.07027270644903183, -0.1375318020582199, 0.17382235825061798, -0.006416799500584602, 0.03095489740371704, 0.02619912475347519, -0.11524339765310287, 0.0011568893678486347, 0.1574116200208664, 0.09955475479364395, 0.012955383397638798, 0.01134408637881279, 0.042026229202747345, 0.022336097434163094, -0.028699487447738647, 0.043595004826784134, 0.041214171797037125, 0.23129963874816895, -0.052405718713998795, 0.08202041685581207, -0.03185148164629936, -0.004379151854664087, -0.03789322450757027, 0.12445353716611862, -0.060326479375362396, 0.02227483130991459, -0.06921926885843277, 0.062353331595659256, -0.07135076820850372, -0.24748747050762177, -0.008219271898269653, -0.05192304402589798, -0.1298334300518036, -0.004560766741633415, 0.035692762583494186, -0.02597453072667122, 0.03829430043697357, 0.035901717841625214, -0.023616837337613106, 0.19165511429309845, 0.013940056785941124, -0.06515047699213028, -0.07442218065261841, 0.058681707829236984, -0.06423678249120712, 0.2803994417190552, 0.006835433654487133, 0.016139252111315727, 0.08373691141605377, -0.014748833142220974, -0.1317623406648636, 0.04834279417991638, 0.08843164145946503, -0.06458649784326553, 0.036104343831539154, 0.13416075706481934, -0.013340378180146217, 0.13884149491786957, 0.0372159481048584, 0.012514977715909481, 
0.07617484778165817, 0.0549602247774601, 0.01943296566605568, -0.07583849877119064, 0.05433386191725731, -0.07624156773090363, 0.11853775382041931, 0.11956106871366501, -0.006324150133877993, 0.016078868880867958, -0.0555884949862957, 0.04294972121715546, -0.05140954628586769, 0.10306964069604874, -0.017030920833349228, -0.12326281517744064, 0.054757945239543915, 0.0076875886879861355, 0.07494679093360901, -0.20194949209690094, -0.07407733798027039, 0.10032675415277481, -0.058062005788087845, -0.014587095007300377, 0.08923017978668213, 0.04174424707889557, 0.030552102252840996, -0.0484481044113636, -0.10417560487985611, 0.029646066948771477, 0.1003994345664978, -0.06031898409128189, -0.044867128133773804 ]
f9237e28bf0380005c4686985118f52ae3bcf7bb
# MAPA Maltese Named-Entity Recognition dataset from the [MAPA Project](https://mapa-project.eu/). This dataset has some fixes as detailed in [Cross-Lingual Transfer from Related Languages: Treating Low-Resource Maltese as Multilingual Code-Switching](https://arxiv.org/abs/2401.16895): - Manually fixed some inconsistencies between Level 1 & Level 2 tags. - Manually added the labels for some spans which were marked as entity spans but didn't have the tags. - Manually fixed incorrectly marked spans with respect to tokenisation (either having a sub-word marked as an entity span, or having part of a previous word marked as an entity span; in both cases the whole word should've been marked as a span). - Re-tokenised the dataset using the [MLRS Tokeniser](https://mlrs.research.um.edu.mt/), mainly done to not split off `-` & `'` characters as separate tokens as done by the [official convertor](https://gitlab.com/MAPA-EU-Project/mapa_project/-/blob/master/documentation/detection_training.md#converting-inception-tsv-files-to-jsonlines), since these are linguistically important characters in Maltese. While doing so, any tokens not split off by the tokeniser but which had multiple entity sub-spans, were also split off into separate tokens. Lastly, all tokens ending with `-`/`'` were checked to ensure that these weren't miscellaneous characters (e.g. for number ranges or quotation marks), in which case they were manually split into separate tokens. For `EurLex` documents the same training/validation/testing splits from [joelniklaus/mapa](https://huggingface.co/datasets/joelniklaus/mapa) are kept. Otherwise, for the other domains, we split documents in similar ratios. 
## Citations If you used this dataset, please cite these works: - The original dataset: ```bibtex @inproceedings{gianola-2020-mapa, author = {Lucie Gianola and Ēriks Ajausks and Victoria Arranz and Chomicha Bendahman and Laurent Bié and Claudia Borg and Aleix Cerdà and Khalid Choukri and Montse Cuadros and Ona de Gibert and Hans Degroote and Elena Edelman and Thierry Etchegoyhen and Ángela Franco Torres and Mercedes García Hernandez and Aitor García Pablos and Albert Gatt and Cyril Grouin and Manuel Herranz and Alejandro Adolfo Kohan and Thomas Lavergne and Maite Melero and Patrick Paroubek and Mickaël Rigault and Mike Rosner and Roberts Rozis and Lonneke van der Plas and Rinalds Vīksna and Pierre Zweigenbaum}, title = {Automatic Removal of Identifying Information in Official EU Languages for Public Administrations: The {MAPA} Project}, booktitle = {Proceedings of the 33rd International Conference on Legal Knowledge and Information Systems ({JURIX'20})}, pages = {223--226}, year = {2020}, publisher = {IOS Press}, url = {https://ebooks.iospress.nl/volumearticle/56182}, doi = {10.3233/FAIA200869}, } ``` - The fixes & training/validation/testing splits: ```bibtex @misc{micallef-etal-2024-maltese-etymology, title={Cross-Lingual Transfer from Related Languages: Treating Low-Resource {M}altese as Multilingual Code-Switching}, author={Kurt Micallef and Nizar Habash and Claudia Borg and Fadhl Eryani and Houda Bouamor}, year={2024}, eprint={2401.16895}, archivePrefix={arXiv}, primaryClass={cs.CL}, } ```
MLRS/mapa_maltese
[ "task_categories:token-classification", "task_ids:named-entity-recognition", "size_categories:1K<n<10K", "language:mt", "license:cc-by-4.0", "arxiv:2401.16895", "region:us" ]
2024-01-25T14:20:32+00:00
{"language": ["mt"], "license": "cc-by-4.0", "size_categories": ["1K<n<10K"], "task_categories": ["token-classification"], "task_ids": ["named-entity-recognition"], "pretty_name": "MAPA Maltese"}
2024-01-31T07:53:31+00:00
[ "2401.16895" ]
[ "mt" ]
TAGS #task_categories-token-classification #task_ids-named-entity-recognition #size_categories-1K<n<10K #language-Maltese #license-cc-by-4.0 #arxiv-2401.16895 #region-us
# MAPA Maltese Named-Entity Recognition dataset from the MAPA Project. This dataset has some fixes as detailed in Cross-Lingual Transfer from Related Languages: Treating Low-Resource Maltese as Multilingual Code-Switching: - Manually fixed some inconsistencies between Level 1 & Level 2 tags. - Manually added the labels for some spans which were marked as entity spans but didn't have the tags. - Manually fixed incorrectly marked spans with respect to tokenisation (either having a sub-word marked as an entity span, or having part of a previous word marked as an entity span; in both cases the whole word should've been marked as a span). - Re-tokenised the dataset using the MLRS Tokeniser, mainly done to not split off '-' & ''' characters as separate tokens as done by the official convertor, since these are linguistically important characters in Maltese. While doing so, any tokens not split off by the tokeniser but which had multiple entity sub-spans, were also split off into separate tokens. Lastly, all tokens ending with '-'/''' were checked to ensure that these weren't miscellaneous characters (e.g. for number ranges or quotation marks), in which case they were manually split into separate tokens. For 'EurLex' documents the same training/validation/testing splits from joelniklaus/mapa are kept. Otherwise, for the other domains, we split documents in similar ratios. s If you used this dataset, please cite these works: - The original dataset: - The fixes & training/validation/testing splits:
[ "# MAPA Maltese\n\nNamed-Entity Recognition dataset from the MAPA Project.\nThis dataset has some fixes as detailed in Cross-Lingual Transfer from Related Languages: Treating Low-Resource Maltese as Multilingual Code-Switching:\n- Manually fixed some inconsistencies between Level 1 & Level 2 tags.\n- Manually added the labels for some spans which were marked as entity spans but didn't have the tags.\n- Manually fixed incorrectly marked spans with respect to tokenisation (either having a sub-word marked as an entity span, or having part of a previous word marked as an entity span; in both cases the whole word should've been marked as a span).\n- Re-tokenised the dataset using the MLRS Tokeniser, mainly done to not split off '-' & ''' characters as separate tokens as done by the official convertor, since these are linguistically important characters in Maltese.\n While doing so, any tokens not split off by the tokeniser but which had multiple entity sub-spans, were also split off into separate tokens.\n Lastly, all tokens ending with '-'/''' were checked to ensure that these weren't miscellaneous characters (e.g. for number ranges or quotation marks), in which case they were manually split into separate tokens.\n\nFor 'EurLex' documents the same training/validation/testing splits from joelniklaus/mapa are kept.\nOtherwise, for the other domains, we split documents in similar ratios.\n\ns\n\nIf you used this dataset, please cite these works:\n- The original dataset:\n \n- The fixes & training/validation/testing splits:" ]
[ "TAGS\n#task_categories-token-classification #task_ids-named-entity-recognition #size_categories-1K<n<10K #language-Maltese #license-cc-by-4.0 #arxiv-2401.16895 #region-us \n", "# MAPA Maltese\n\nNamed-Entity Recognition dataset from the MAPA Project.\nThis dataset has some fixes as detailed in Cross-Lingual Transfer from Related Languages: Treating Low-Resource Maltese as Multilingual Code-Switching:\n- Manually fixed some inconsistencies between Level 1 & Level 2 tags.\n- Manually added the labels for some spans which were marked as entity spans but didn't have the tags.\n- Manually fixed incorrectly marked spans with respect to tokenisation (either having a sub-word marked as an entity span, or having part of a previous word marked as an entity span; in both cases the whole word should've been marked as a span).\n- Re-tokenised the dataset using the MLRS Tokeniser, mainly done to not split off '-' & ''' characters as separate tokens as done by the official convertor, since these are linguistically important characters in Maltese.\n While doing so, any tokens not split off by the tokeniser but which had multiple entity sub-spans, were also split off into separate tokens.\n Lastly, all tokens ending with '-'/''' were checked to ensure that these weren't miscellaneous characters (e.g. for number ranges or quotation marks), in which case they were manually split into separate tokens.\n\nFor 'EurLex' documents the same training/validation/testing splits from joelniklaus/mapa are kept.\nOtherwise, for the other domains, we split documents in similar ratios.\n\ns\n\nIf you used this dataset, please cite these works:\n- The original dataset:\n \n- The fixes & training/validation/testing splits:" ]
[ 68, 395 ]
[ "passage: TAGS\n#task_categories-token-classification #task_ids-named-entity-recognition #size_categories-1K<n<10K #language-Maltese #license-cc-by-4.0 #arxiv-2401.16895 #region-us \n# MAPA Maltese\n\nNamed-Entity Recognition dataset from the MAPA Project.\nThis dataset has some fixes as detailed in Cross-Lingual Transfer from Related Languages: Treating Low-Resource Maltese as Multilingual Code-Switching:\n- Manually fixed some inconsistencies between Level 1 & Level 2 tags.\n- Manually added the labels for some spans which were marked as entity spans but didn't have the tags.\n- Manually fixed incorrectly marked spans with respect to tokenisation (either having a sub-word marked as an entity span, or having part of a previous word marked as an entity span; in both cases the whole word should've been marked as a span).\n- Re-tokenised the dataset using the MLRS Tokeniser, mainly done to not split off '-' & ''' characters as separate tokens as done by the official convertor, since these are linguistically important characters in Maltese.\n While doing so, any tokens not split off by the tokeniser but which had multiple entity sub-spans, were also split off into separate tokens.\n Lastly, all tokens ending with '-'/''' were checked to ensure that these weren't miscellaneous characters (e.g. for number ranges or quotation marks), in which case they were manually split into separate tokens.\n\nFor 'EurLex' documents the same training/validation/testing splits from joelniklaus/mapa are kept.\nOtherwise, for the other domains, we split documents in similar ratios.\n\ns\n\nIf you used this dataset, please cite these works:\n- The original dataset:\n \n- The fixes & training/validation/testing splits:" ]
[ -0.06967829912900925, -0.10790275037288666, -0.006942423526197672, 0.024292126297950745, 0.07170159369707108, 0.020446373149752617, 0.08148530125617981, 0.07005202025175095, -0.06338825821876526, 0.13025011122226715, -0.09750593453645706, 0.031442947685718536, 0.02841079793870449, 0.09710848331451416, -0.04686760902404785, -0.14793621003627777, 0.02504020743072033, -0.0594726987183094, 0.05670890584588051, 0.08497332781553268, 0.12004970014095306, -0.03728712350130081, 0.0407111756503582, -0.024577319622039795, -0.04864373058080673, -0.022686386480927467, 0.051847390830516815, -0.000775259337387979, 0.08033394813537598, 0.08223336935043335, 0.11985328048467636, -0.00880065280944109, -0.011184788309037685, -0.13739070296287537, 0.028989223763346672, 0.12880079448223114, 0.08153162896633148, 0.03226635605096817, 0.06477005779743195, -0.0010293733794242144, 0.045739952474832535, -0.1280926614999771, -0.002713325899094343, 0.002710406668484211, -0.0987892746925354, -0.02962198108434677, -0.15673525631427765, 0.01902325078845024, 0.11643043160438538, -0.03154924511909485, -0.031525868922472, 0.09742369502782822, 0.04867595434188843, 0.08984428644180298, 0.06863703578710556, -0.1703011691570282, 0.0006188488332554698, 0.022697215899825096, 0.05532774701714516, 0.11437397450208664, -0.028997192159295082, 0.030072472989559174, -0.016780763864517212, 0.01081553753465414, -0.0919186994433403, -0.058264680206775665, -0.026106692850589752, -0.050785332918167114, -0.1193670704960823, -0.012407280504703522, 0.17678454518318176, -0.010709519498050213, -0.1266714483499527, -0.13254782557487488, -0.08574184775352478, 0.05099063366651535, 0.055202607065439224, -0.03811035305261612, -0.020772339776158333, 0.04621073603630066, 0.13550721108913422, -0.07335911691188812, -0.11723579466342926, -0.016391383484005928, -0.21497146785259247, 0.2079789936542511, 0.015516179613769054, 0.012286866083741188, -0.05583065003156662, 0.08916883170604706, -0.07706118375062943, -0.02230868488550186, 
-0.054892417043447495, -0.023114226758480072, -0.12011328339576721, -0.09215593338012695, -0.10052964836359024, -0.1925540715456009, -0.007344084791839123, 0.02978021465241909, -0.051382794976234436, -0.02661655656993389, -0.19848662614822388, 0.10256126523017883, 0.040017955005168915, 0.08982444554567337, -0.0024907251354306936, 0.03123079612851143, 0.050610434263944626, -0.07964394241571426, 0.05548468232154846, -0.02024022862315178, -0.009159853681921959, -0.02820763923227787, -0.05851154774427414, 0.08758529275655746, 0.10868265479803085, 0.04001040756702423, -0.0348607636988163, -0.02366693690419197, 0.06254716217517853, -0.1817052662372589, 0.022273140028119087, 0.0008774054003879428, -0.006615161430090666, 0.05382232740521431, -0.08061091601848602, 0.061916980892419815, -0.1072131022810936, 0.197797492146492, 0.03211512416601181, 0.03128889203071594, -0.060437120497226715, -0.10486122220754623, 0.08352399617433548, 0.082412488758564, -0.07448332756757736, -0.10415535420179367, -0.17377229034900665, -0.09333369880914688, -0.02622435986995697, -0.00951682310551405, 0.030508384108543396, 0.0576401986181736, 0.0363696813583374, -0.033636100590229034, 0.002649619709700346, 0.015474068932235241, -0.01044175960123539, 0.03849171847105026, -0.057219766080379486, 0.009745248593389988, -0.019023597240447998, 0.0048002018593251705, -0.14701153337955475, -0.012400206178426743, -0.27630797028541565, 0.11691029369831085, 0.011146225035190582, -0.05064580962061882, -0.061003439128398895, 0.05718466639518738, -0.09151820093393326, 0.04665255546569824, 0.004341506399214268, 0.09251084923744202, -0.21952056884765625, -0.017274366691708565, 0.12588384747505188, -0.15965452790260315, 0.05852160602807999, 0.14112325012683868, 0.019038844853639603, 0.09484578669071198, 0.13726383447647095, 0.20565924048423767, -0.024612730368971825, -0.03340776264667511, -0.05852813273668289, -0.09352675080299377, -0.045437298715114594, 0.19315758347511292, 0.10713280737400055, 
-0.11125437915325165, 0.2035561352968216, -0.010364125482738018, -0.04762057960033417, -0.08469200879335403, 0.033042289316654205, -0.009035210125148296, -0.037149425595998764, 0.03829801827669144, 0.018367711454629898, -0.06292546540498734, -0.039034344255924225, -0.05423113331198692, -0.0638645738363266, -0.003986241761595011, 0.0467957966029644, -0.024175722151994705, 0.15405228734016418, -0.05174385383725166, 0.019867539405822754, -0.027966570109128952, 0.017349446192383766, -0.18193230032920837, -0.15498165786266327, 0.04789181798696518, -0.15917661786079407, 0.07339588552713394, 0.020251410081982613, 0.015186799690127373, 0.05776746943593025, -0.08613882958889008, 0.05523388087749481, -0.007896514609456062, -0.0036617680452764034, -0.08637980371713638, -0.11441478133201599, 0.033524755388498306, -0.0667320191860199, -0.05559869855642319, -0.04285931587219238, 0.053689684718847275, 0.16744500398635864, 0.16036510467529297, 0.045647066086530685, -0.06607142090797424, -0.018864283338189125, 0.10213422775268555, -0.026105564087629318, -0.031712714582681656, 0.0013793195830658078, 0.012379685416817665, 0.012002682313323021, 0.16212061047554016, -0.1928672045469284, -0.14713141322135925, 0.05078041926026344, 0.0562523677945137, -0.07616957277059555, -0.030271021649241447, -0.014977095648646355, 0.009641635231673717, -0.1425895392894745, -0.06762812286615372, 0.037411484867334366, 0.10058080404996872, 0.05955839529633522, -0.06298977136611938, -0.010281764902174473, -0.02888450026512146, -0.0597735233604908, -0.026303522288799286, 0.025299524888396263, -0.08421409130096436, -0.21694570779800415, 0.0885302722454071, 0.07174065709114075, 0.009767900221049786, 0.11700219660997391, 0.008145525120198727, -0.11070811748504639, -0.07373853772878647, 0.092203289270401, 0.03207457438111305, -0.016985099762678146, -0.017234336584806442, 0.03168443217873573, 0.022495919838547707, 0.11065603792667389, 0.06017475947737694, -0.052394114434719086, 0.0531218945980072, 
0.037907835096120834, -0.0018336193170398474, -0.05416522175073624, 0.05831819772720337, 0.012804227881133556, 0.10735992342233658, -0.0545353926718235, 0.13897466659545898, 0.025312071666121483, -0.04687562212347984, -0.1134795993566513, 0.0774603933095932, -0.09500376880168915, -0.18872612714767456, -0.2424725443124771, 0.02784447930753231, -0.0941985622048378, 0.01651816815137863, 0.061628494411706924, 0.01892745867371559, 0.0015303266700357199, -0.04205317422747612, 0.1201319694519043, -0.0022807351779192686, -0.03938805311918259, -0.08107823133468628, 0.062029387801885605, -0.07033951580524445, -0.023548973724246025, 0.0010490355780348182, 0.011323207058012486, 0.010459644719958305, -0.00593474879860878, 0.02284197509288788, 0.0724322646856308, 0.08985641598701477, -0.057899631559848785, -0.03585679829120636, -0.05201200768351555, 0.037970513105392456, -0.11236506700515747, 0.08456786721944809, 0.14256995916366577, -0.10538489371538162, 0.07150150835514069, 0.1147269457578659, 0.0038002950605005026, 0.0017771697603166103, 0.030614349991083145, 0.06670064479112625, -0.05038541927933693, -0.23622870445251465, -0.08579247444868088, -0.09585794806480408, 0.01387865375727415, -0.08137434720993042, 0.004129940178245306, -0.08215988427400589, -0.035358600318431854, -0.1478438824415207, -0.10881924629211426, 0.041269078850746155, 0.06634385883808136, 0.19872328639030457, -0.017567386850714684, 0.09345480054616928, -0.04183080047369003, -0.0018555520800873637, 0.09652656316757202, 0.07294280081987381, 0.08237795531749725, -0.0057005323469638824, 0.04552237689495087, 0.015762455761432648, -0.02126896381378174, 0.0694475769996643, 0.047222767025232315, 0.004230017773807049, 0.05643323063850403, -0.04767223075032234, -0.08567044138908386, 0.040440913289785385, 0.05448533594608307, -0.004946961998939514, -0.08221706748008728, -0.007578231859952211, -0.006679059937596321, 0.08663363754749298, 0.092872753739357, 0.01970788836479187, 0.012527908198535442, 
-0.06753219664096832, 0.031972311437129974, -0.046150367707014084, -0.04708046093583107, -0.06382825970649719, 0.1575912982225418, -0.1052580326795578, 0.01788850501179695, 0.015183265320956707, 0.06888622045516968, -0.0655585452914238, 0.0009366411832161248, -0.06947172433137894, -0.013873384334146976, -0.06158052757382393, 0.07143020629882812, -0.1715817004442215, 0.12033652514219284, 0.042666107416152954, 0.08985389769077301, -0.011737450025975704, 0.029459914192557335, -0.0015399637632071972, 0.12658095359802246, 0.14279332756996155, 0.029799209907650948, -0.264636367559433, -0.0952363908290863, -0.017765656113624573, -0.024661017581820488, 0.14706814289093018, -0.0889788568019867, 0.07162825018167496, -0.0032992379274219275, 0.061938390135765076, -0.025182144716382027, 0.1102246418595314, -0.036840733140707016, -0.13469180464744568, 0.118770070374012, 0.046219680458307266, -0.0005163702298887074, -0.02785368263721466, -0.0010438377503305674, -0.02054000273346901, 0.13329415023326874, -0.1343914121389389, -0.03704553842544556, -0.08478498458862305, -0.03859390318393707, 0.0829635038971901, -0.09869472682476044, -0.09121014922857285, -0.020458977669477463, 0.08643987029790878, -0.06719879060983658, -0.028431233018636703, 0.026720140129327774, -0.05279523506760597, -0.15545298159122467, -0.055439628660678864, 0.10703594237565994, -0.023535195738077164, 0.02120431326329708, 0.04226475581526756, 0.08806060999631882, 0.010938936844468117, -0.04533347859978676, 0.07264705747365952, 0.08099646121263504, 0.08775540441274643, 0.16160665452480316, -0.20425833761692047, -0.04621514678001404, -0.08786414563655853, -0.17555458843708038, 0.09854085743427277, 0.13269762694835663, -0.008595417253673077, 0.08773092925548553, 0.17454303801059723, -0.14755773544311523, -0.13884085416793823, -0.017564520239830017, -0.04818280413746834, 0.06296268105506897, 0.004556187428534031, -0.06429636478424072, 0.07040934264659882, 0.13376426696777344, 0.039716143161058426, 
-0.08863601088523865, -0.19970467686653137, -0.1340978443622589, -0.03201201558113098, -0.11986492574214935, 0.01599658839404583, -0.13169223070144653, -0.062960684299469, -0.039596572518348694, 0.04258405789732933, 0.024006681516766548, -0.11742321401834488, 0.09577576071023941, 0.08766156435012817, 0.04227098077535629, 0.031002432107925415, -0.01865164376795292, 0.15467295050621033, 0.01862969435751438, -0.01764734461903572, -0.022124744951725006, -0.06757659465074539, 0.1831914633512497, -0.012454235926270485, 0.017906541004776955, 0.02824927121400833, -0.017766283825039864, -0.07718384265899658, -0.062223438173532486, -0.0404006652534008, 0.10519691556692123, -0.0400225929915905, -0.03875968977808952, -0.04852106049656868, 0.036388903856277466, 0.0692182183265686, -0.02662932686507702, -0.07161876559257507, -0.11531059443950653, 0.029420161619782448, 0.18516412377357483, 0.020274752750992775, -0.010850884020328522, -0.11021246761083603, 0.020819829776883125, -0.03280891478061676, 0.030594101175665855, 0.032485634088516235, 0.06055888161063194, 0.10432842373847961, -0.005465625785291195, 0.060634683817625046, 0.03280577063560486, -0.09419050812721252, 0.027374576777219772, 0.0543338805437088, -0.03091154620051384, -0.12857696413993835, 0.03992872312664986, -0.01856246404349804, -0.05306582152843475, 0.03762926906347275, 0.18886665999889374, 0.03135434910655022, 0.008731214329600334, -0.014378451742231846, 0.058432724326848984, -0.051534224301576614, 0.16761338710784912, -0.05084462836384773, 0.013572268187999725, -0.008914640173316002, 0.029367974027991295, 0.0944613516330719, -0.12034082412719727, -0.023181630298495293, 0.011311739683151245, -0.08864901959896088, -0.036958273500204086, -0.062046125531196594, -0.030998283997178078, -0.08792205899953842, -0.07174795120954514, -0.052442051470279694, -0.04206186160445213, -0.028669998049736023, 0.122316874563694, 0.037940189242362976, 0.09296144545078278, -0.05699000880122185, -0.06893059611320496, 
-0.03300775960087776, 0.06927181780338287, 0.10898139327764511, 0.07085475325584412, -0.03661011531949043, 0.2958683669567108, -0.0876341313123703, -0.05173643305897713, 0.011727798730134964, 0.017042506486177444, -0.032438356429338455, 0.02024608477950096, 0.07738714665174484, -0.030010206624865532, -0.01320972666144371, -0.031106777489185333, 0.012208078056573868, -0.06130622699856758, 0.0011888950830325484, 0.0020696648862212896, -0.02522869221866131, -0.010509386658668518, -0.07802446186542511, 0.05303622782230377, -0.1632387936115265, 0.012262032367289066, 0.07942311465740204, -0.06890133768320084, 0.045358650386333466, 0.0390692800283432, -0.039194505661726, -0.02879754640161991, -0.05798067897558212, 0.024343343451619148, -0.06039982661604881, 0.04811770096421242, -0.05627843365073204, -0.12809406220912933, 0.015680905431509018, 0.04578794166445732, 0.004529101308435202, 0.0792461708188057, 0.16724340617656708, -0.0802418440580368, 0.03687291219830513, 0.03901759535074234, -0.08082395792007446, -0.04078824445605278, 0.04027010500431061, 0.04679417237639427, 0.1386951357126236, 0.06517046689987183, 0.00010771056986413896, 0.0650036558508873, -0.03727768734097481, 0.037866439670324326, 0.040135446935892105, -0.027030099183321, 0.09353543072938919, -0.06285178661346436, 0.05286546051502228, 0.013762623071670532, 0.0981682613492012, 0.07390548288822174, -0.04203469678759575, 0.08861032128334045, -0.007398461457341909, -0.12335412204265594, 0.0110408840700984, 0.07481115311384201, -0.0015227471012622118, -0.022519096732139587, -0.027142873033881187, -0.05447590723633766, -0.05510834977030754, -0.01671943999826908, 0.15497593581676483, 0.20465002954006195, 0.1406223177909851, 0.027512554079294205, 0.028947940096259117, -0.02778867445886135, -0.10886549949645996, -0.01828034594655037, 0.004292566329240799, -0.013639739714562893, -0.07871542125940323, 0.04130899906158447, 0.11466293781995773, -0.13364078104496002, 0.11102539300918579, -0.011800193227827549, 
-0.052934352308511734, -0.12371794879436493, -0.13467352092266083, -0.03222489729523659, 0.0412590317428112, -0.05819329619407654, -0.0753779411315918, 0.11555269360542297, 0.0708601325750351, -0.022547828033566475, 0.0086022038012743, 0.09422481805086136, -0.19854919612407684, -0.09591977298259735, 0.05048833042383194, -0.025546308606863022, 0.0792229175567627, 0.06641512364149094, -0.038817692548036575, 0.012977715581655502, 0.02635093778371811, 0.07611233741044998, 0.05157311260700226, 0.11477338522672653, -0.034635066986083984, -0.14565131068229675, 0.012473749928176403, -0.014547172002494335, 0.007735622581094503, 0.07202360779047012, 0.11560980975627899, 0.06223762780427933, -0.04320770874619484, 0.00462196534499526, 0.2025071531534195, -0.02200785093009472, -0.1570626199245453, -0.13462097942829132, 0.08565548062324524, 0.05946332961320877, 0.06854596734046936, -0.031622469425201416, -0.1271737515926361, 0.06725241243839264, 0.08242311328649521, 0.07316838949918747, 0.03184318169951439, 0.016785945743322372, 0.08572793006896973, 0.029995549470186234, 0.020456623286008835, 0.0050398739986121655, 0.02002088353037834, 0.18136276304721832, -0.02165776863694191, 0.19264593720436096, -0.015039137564599514, 0.0011077525559812784, -0.05738306790590286, 0.14411106705665588, -0.08214685320854187, -0.013975643552839756, -0.057682279497385025, 0.1383095383644104, 0.08663208782672882, -0.23462334275245667, -0.0036089608911424875, -0.10978574305772781, -0.04089478775858879, 0.011235810816287994, 0.09839178621768951, -0.06462237983942032, 0.07806330919265747, -0.01681012474000454, -0.05586883798241615, 0.06381537765264511, 0.012563375756144524, 0.04005861654877663, -0.06237731873989105, 0.03667765110731125, -0.09399954974651337, -0.0007032168796285987, 0.02120107412338257, 0.16797813773155212, 0.07366347312927246, -0.008590947836637497, -0.046366795897483826, -0.018066687509417534, -0.012432937510311604, -0.033070482313632965, 0.047319669276475906, 0.12526127696037292, 
-0.048570748418569565, 0.07335880398750305, 0.06827518343925476, -0.060835208743810654, 0.09884320199489594, 0.06309904903173447, -0.05277292802929878, -0.058916348963975906, 0.06458774954080582, -0.15548761188983917, 0.0801076740026474, 0.12858006358146667, 0.017080748453736305, 0.09064417332410812, -0.03338250890374184, -0.01775733195245266, 0.06174817681312561, 0.0657566711306572, 0.0413861908018589, -0.1594666987657547, 0.016191955655813217, 0.03763795644044876, 0.07596603035926819, -0.18157286942005157, -0.07358250766992569, 0.12241321802139282, -0.020749056711792946, -0.019233662635087967, 0.07781778275966644, 0.09136155992746353, -0.09145711362361908, -0.02040364407002926, -0.2866847515106201, 0.04247884452342987, 0.08398429304361343, -0.023571059107780457, -0.0020613009110093117 ]
a8b618a3e22cacd42b8fac968126d7a22468ac03
# Dataset Card for Evaluation run of AA051612/A0124 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [AA051612/A0124](https://huggingface.co/AA051612/A0124) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_AA051612__A0124", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T14:19:16.198603](https://huggingface.co/datasets/open-llm-leaderboard/details_AA051612__A0124/blob/main/results_2024-01-25T14-19-16.198603.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.8268443438842564, "acc_stderr": 0.024801319555947502, "acc_norm": 0.8344552297563383, "acc_norm_stderr": 0.0252029147367926, "mc1": 0.390452876376989, "mc1_stderr": 0.017078230743431448, "mc2": 0.5652174373687721, "mc2_stderr": 0.015479461186777867 }, "harness|arc:challenge|25": { "acc": 0.6399317406143344, "acc_stderr": 0.014027516814585188, "acc_norm": 0.6783276450511946, "acc_norm_stderr": 0.013650488084494164 }, "harness|hellaswag|10": { "acc": 0.6504680342561243, "acc_stderr": 0.004758476684324042, "acc_norm": 0.8471420035849433, "acc_norm_stderr": 0.003591151323268329 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.62, "acc_stderr": 0.04878317312145632, "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.837037037037037, "acc_stderr": 0.03190541474482841, "acc_norm": 0.837037037037037, "acc_norm_stderr": 0.03190541474482841 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.9210526315789473, "acc_stderr": 0.021944342818247923, "acc_norm": 0.9210526315789473, "acc_norm_stderr": 0.021944342818247923 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.83, "acc_stderr": 0.03775251680686371, "acc_norm": 0.83, "acc_norm_stderr": 0.03775251680686371 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.879245283018868, "acc_stderr": 0.020054189400972373, "acc_norm": 0.879245283018868, "acc_norm_stderr": 0.020054189400972373 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.9375, "acc_stderr": 0.02024219611347799, "acc_norm": 0.9375, "acc_norm_stderr": 0.02024219611347799 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.73, "acc_stderr": 0.04461960433384741, "acc_norm": 0.73, "acc_norm_stderr": 0.04461960433384741 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.8208092485549133, "acc_stderr": 0.02924251305906329, "acc_norm": 0.8208092485549133, "acc_norm_stderr": 0.02924251305906329 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.696078431372549, "acc_stderr": 0.04576665403207763, "acc_norm": 0.696078431372549, "acc_norm_stderr": 0.04576665403207763 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.84, "acc_stderr": 0.0368452949177471, "acc_norm": 0.84, "acc_norm_stderr": 0.0368452949177471 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.8425531914893617, "acc_stderr": 0.023809905196619702, "acc_norm": 0.8425531914893617, "acc_norm_stderr": 0.023809905196619702 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.7105263157894737, "acc_stderr": 0.04266339443159394, "acc_norm": 0.7105263157894737, "acc_norm_stderr": 0.04266339443159394 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.8551724137931035, "acc_stderr": 0.029327243269363392, "acc_norm": 0.8551724137931035, "acc_norm_stderr": 0.029327243269363392 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.8174603174603174, "acc_stderr": 0.019894879367175548, "acc_norm": 0.8174603174603174, "acc_norm_stderr": 0.019894879367175548 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5396825396825397, "acc_stderr": 0.04458029125470973, "acc_norm": 0.5396825396825397, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.932258064516129, "acc_stderr": 0.014296101903893372, "acc_norm": 0.932258064516129, "acc_norm_stderr": 0.014296101903893372 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 
0.7339901477832512, "acc_stderr": 0.03108982600293753, "acc_norm": 0.7339901477832512, "acc_norm_stderr": 0.03108982600293753 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.86, "acc_stderr": 0.034873508801977704, "acc_norm": 0.86, "acc_norm_stderr": 0.034873508801977704 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.9333333333333333, "acc_stderr": 0.019478290326359282, "acc_norm": 0.9333333333333333, "acc_norm_stderr": 0.019478290326359282 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.9494949494949495, "acc_stderr": 0.015602012491972255, "acc_norm": 0.9494949494949495, "acc_norm_stderr": 0.015602012491972255 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9792746113989638, "acc_stderr": 0.010281417011909029, "acc_norm": 0.9792746113989638, "acc_norm_stderr": 0.010281417011909029 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.8641025641025641, "acc_stderr": 0.01737454649323547, "acc_norm": 0.8641025641025641, "acc_norm_stderr": 0.01737454649323547 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.6111111111111112, "acc_stderr": 0.029723278961476664, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.029723278961476664 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.9369747899159664, "acc_stderr": 0.015785085223670926, "acc_norm": 0.9369747899159664, "acc_norm_stderr": 0.015785085223670926 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.6291390728476821, "acc_stderr": 0.03943966699183629, "acc_norm": 0.6291390728476821, "acc_norm_stderr": 0.03943966699183629 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9559633027522936, "acc_stderr": 0.008796877218234045, "acc_norm": 0.9559633027522936, "acc_norm_stderr": 0.008796877218234045 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.7731481481481481, "acc_stderr": 0.028561650102422273, "acc_norm": 0.7731481481481481, 
"acc_norm_stderr": 0.028561650102422273 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9509803921568627, "acc_stderr": 0.01515383934021268, "acc_norm": 0.9509803921568627, "acc_norm_stderr": 0.01515383934021268 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.9409282700421941, "acc_stderr": 0.015346597463888697, "acc_norm": 0.9409282700421941, "acc_norm_stderr": 0.015346597463888697 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.8430493273542601, "acc_stderr": 0.024413587174907412, "acc_norm": 0.8430493273542601, "acc_norm_stderr": 0.024413587174907412 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.9312977099236641, "acc_stderr": 0.022184936922745042, "acc_norm": 0.9312977099236641, "acc_norm_stderr": 0.022184936922745042 }, "harness|hendrycksTest-international_law|5": { "acc": 0.9338842975206612, "acc_stderr": 0.022683403691723312, "acc_norm": 0.9338842975206612, "acc_norm_stderr": 0.022683403691723312 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.9351851851851852, "acc_stderr": 0.023800937426629205, "acc_norm": 0.9351851851851852, "acc_norm_stderr": 0.023800937426629205 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.9386503067484663, "acc_stderr": 0.01885387414579323, "acc_norm": 0.9386503067484663, "acc_norm_stderr": 0.01885387414579323 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.6875, "acc_stderr": 0.043994650575715215, "acc_norm": 0.6875, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.9223300970873787, "acc_stderr": 0.026501440784762752, "acc_norm": 0.9223300970873787, "acc_norm_stderr": 0.026501440784762752 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9658119658119658, "acc_stderr": 0.011904341997629818, "acc_norm": 0.9658119658119658, "acc_norm_stderr": 0.011904341997629818 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.9, "acc_stderr": 0.030151134457776348, "acc_norm": 0.9, "acc_norm_stderr": 
0.030151134457776348 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.946360153256705, "acc_stderr": 0.00805691182236487, "acc_norm": 0.946360153256705, "acc_norm_stderr": 0.00805691182236487 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.8410404624277457, "acc_stderr": 0.019685307033571946, "acc_norm": 0.8410404624277457, "acc_norm_stderr": 0.019685307033571946 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.8502793296089386, "acc_stderr": 0.011933090460111657, "acc_norm": 0.8502793296089386, "acc_norm_stderr": 0.011933090460111657 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.9019607843137255, "acc_stderr": 0.01702722293558219, "acc_norm": 0.9019607843137255, "acc_norm_stderr": 0.01702722293558219 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.887459807073955, "acc_stderr": 0.017949292186800647, "acc_norm": 0.887459807073955, "acc_norm_stderr": 0.017949292186800647 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.9228395061728395, "acc_stderr": 0.014847704893944928, "acc_norm": 0.9228395061728395, "acc_norm_stderr": 0.014847704893944928 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.7588652482269503, "acc_stderr": 0.02551873104953777, "acc_norm": 0.7588652482269503, "acc_norm_stderr": 0.02551873104953777 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.7672750977835724, "acc_stderr": 0.01079259555388848, "acc_norm": 0.7672750977835724, "acc_norm_stderr": 0.01079259555388848 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.9301470588235294, "acc_stderr": 0.015484012441056329, "acc_norm": 0.9301470588235294, "acc_norm_stderr": 0.015484012441056329 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.8774509803921569, "acc_stderr": 0.013266175773054252, "acc_norm": 0.8774509803921569, "acc_norm_stderr": 0.013266175773054252 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.8, "acc_stderr": 0.03831305140884601, "acc_norm": 0.8, "acc_norm_stderr": 0.03831305140884601 }, 
"harness|hendrycksTest-security_studies|5": { "acc": 0.8857142857142857, "acc_stderr": 0.020367976491952145, "acc_norm": 0.8857142857142857, "acc_norm_stderr": 0.020367976491952145 }, "harness|hendrycksTest-sociology|5": { "acc": 0.9303482587064676, "acc_stderr": 0.018000052253856254, "acc_norm": 0.9303482587064676, "acc_norm_stderr": 0.018000052253856254 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.96, "acc_stderr": 0.01969463855669321, "acc_norm": 0.96, "acc_norm_stderr": 0.01969463855669321 }, "harness|hendrycksTest-virology|5": { "acc": 0.6506024096385542, "acc_stderr": 0.037117251907407514, "acc_norm": 0.6506024096385542, "acc_norm_stderr": 0.037117251907407514 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.9415204678362573, "acc_stderr": 0.017996678857280124, "acc_norm": 0.9415204678362573, "acc_norm_stderr": 0.017996678857280124 }, "harness|truthfulqa:mc|0": { "mc1": 0.390452876376989, "mc1_stderr": 0.017078230743431448, "mc2": 0.5652174373687721, "mc2_stderr": 0.015479461186777867 }, "harness|winogrande|5": { "acc": 0.8074191002367798, "acc_stderr": 0.0110825388474919 }, "harness|gsm8k|5": { "acc": 0.6322971948445792, "acc_stderr": 0.013281630503395482 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_AA051612__A0124
[ "region:us" ]
2024-01-25T14:21:39+00:00
{"pretty_name": "Evaluation run of AA051612/A0124", "dataset_summary": "Dataset automatically created during the evaluation run of model [AA051612/A0124](https://huggingface.co/AA051612/A0124) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_AA051612__A0124\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T14:19:16.198603](https://huggingface.co/datasets/open-llm-leaderboard/details_AA051612__A0124/blob/main/results_2024-01-25T14-19-16.198603.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.8268443438842564,\n \"acc_stderr\": 0.024801319555947502,\n \"acc_norm\": 0.8344552297563383,\n \"acc_norm_stderr\": 0.0252029147367926,\n \"mc1\": 0.390452876376989,\n \"mc1_stderr\": 0.017078230743431448,\n \"mc2\": 0.5652174373687721,\n \"mc2_stderr\": 0.015479461186777867\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6399317406143344,\n \"acc_stderr\": 0.014027516814585188,\n \"acc_norm\": 0.6783276450511946,\n \"acc_norm_stderr\": 0.013650488084494164\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6504680342561243,\n \"acc_stderr\": 0.004758476684324042,\n \"acc_norm\": 0.8471420035849433,\n \"acc_norm_stderr\": 0.003591151323268329\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.837037037037037,\n \"acc_stderr\": 0.03190541474482841,\n \"acc_norm\": 0.837037037037037,\n \"acc_norm_stderr\": 0.03190541474482841\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.9210526315789473,\n \"acc_stderr\": 0.021944342818247923,\n \"acc_norm\": 0.9210526315789473,\n \"acc_norm_stderr\": 0.021944342818247923\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.03775251680686371,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.03775251680686371\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.879245283018868,\n \"acc_stderr\": 0.020054189400972373,\n \"acc_norm\": 0.879245283018868,\n \"acc_norm_stderr\": 0.020054189400972373\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.9375,\n \"acc_stderr\": 0.02024219611347799,\n \"acc_norm\": 0.9375,\n \"acc_norm_stderr\": 0.02024219611347799\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 
0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.04461960433384741,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.04461960433384741\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.8208092485549133,\n \"acc_stderr\": 0.02924251305906329,\n \"acc_norm\": 0.8208092485549133,\n \"acc_norm_stderr\": 0.02924251305906329\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.696078431372549,\n \"acc_stderr\": 0.04576665403207763,\n \"acc_norm\": 0.696078431372549,\n \"acc_norm_stderr\": 0.04576665403207763\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.0368452949177471,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.0368452949177471\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.8425531914893617,\n \"acc_stderr\": 0.023809905196619702,\n \"acc_norm\": 0.8425531914893617,\n \"acc_norm_stderr\": 0.023809905196619702\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.7105263157894737,\n \"acc_stderr\": 0.04266339443159394,\n \"acc_norm\": 0.7105263157894737,\n \"acc_norm_stderr\": 0.04266339443159394\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.8551724137931035,\n \"acc_stderr\": 0.029327243269363392,\n \"acc_norm\": 0.8551724137931035,\n \"acc_norm_stderr\": 0.029327243269363392\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.8174603174603174,\n \"acc_stderr\": 0.019894879367175548,\n \"acc_norm\": 0.8174603174603174,\n \"acc_norm_stderr\": 0.019894879367175548\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5396825396825397,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 
0.5396825396825397,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.932258064516129,\n \"acc_stderr\": 0.014296101903893372,\n \"acc_norm\": 0.932258064516129,\n \"acc_norm_stderr\": 0.014296101903893372\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.7339901477832512,\n \"acc_stderr\": 0.03108982600293753,\n \"acc_norm\": 0.7339901477832512,\n \"acc_norm_stderr\": 0.03108982600293753\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.034873508801977704,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.034873508801977704\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.9333333333333333,\n \"acc_stderr\": 0.019478290326359282,\n \"acc_norm\": 0.9333333333333333,\n \"acc_norm_stderr\": 0.019478290326359282\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.9494949494949495,\n \"acc_stderr\": 0.015602012491972255,\n \"acc_norm\": 0.9494949494949495,\n \"acc_norm_stderr\": 0.015602012491972255\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9792746113989638,\n \"acc_stderr\": 0.010281417011909029,\n \"acc_norm\": 0.9792746113989638,\n \"acc_norm_stderr\": 0.010281417011909029\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.8641025641025641,\n \"acc_stderr\": 0.01737454649323547,\n \"acc_norm\": 0.8641025641025641,\n \"acc_norm_stderr\": 0.01737454649323547\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.6111111111111112,\n \"acc_stderr\": 0.029723278961476664,\n \"acc_norm\": 0.6111111111111112,\n \"acc_norm_stderr\": 0.029723278961476664\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 
0.9369747899159664,\n \"acc_stderr\": 0.015785085223670926,\n \"acc_norm\": 0.9369747899159664,\n \"acc_norm_stderr\": 0.015785085223670926\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.6291390728476821,\n \"acc_stderr\": 0.03943966699183629,\n \"acc_norm\": 0.6291390728476821,\n \"acc_norm_stderr\": 0.03943966699183629\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9559633027522936,\n \"acc_stderr\": 0.008796877218234045,\n \"acc_norm\": 0.9559633027522936,\n \"acc_norm_stderr\": 0.008796877218234045\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.7731481481481481,\n \"acc_stderr\": 0.028561650102422273,\n \"acc_norm\": 0.7731481481481481,\n \"acc_norm_stderr\": 0.028561650102422273\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9509803921568627,\n \"acc_stderr\": 0.01515383934021268,\n \"acc_norm\": 0.9509803921568627,\n \"acc_norm_stderr\": 0.01515383934021268\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.9409282700421941,\n \"acc_stderr\": 0.015346597463888697,\n \"acc_norm\": 0.9409282700421941,\n \"acc_norm_stderr\": 0.015346597463888697\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.8430493273542601,\n \"acc_stderr\": 0.024413587174907412,\n \"acc_norm\": 0.8430493273542601,\n \"acc_norm_stderr\": 0.024413587174907412\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.9312977099236641,\n \"acc_stderr\": 0.022184936922745042,\n \"acc_norm\": 0.9312977099236641,\n \"acc_norm_stderr\": 0.022184936922745042\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.9338842975206612,\n \"acc_stderr\": 0.022683403691723312,\n \"acc_norm\": 0.9338842975206612,\n \"acc_norm_stderr\": 0.022683403691723312\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.9351851851851852,\n \"acc_stderr\": 0.023800937426629205,\n \"acc_norm\": 0.9351851851851852,\n \"acc_norm_stderr\": 
0.023800937426629205\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.9386503067484663,\n \"acc_stderr\": 0.01885387414579323,\n \"acc_norm\": 0.9386503067484663,\n \"acc_norm_stderr\": 0.01885387414579323\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.6875,\n \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.6875,\n \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.9223300970873787,\n \"acc_stderr\": 0.026501440784762752,\n \"acc_norm\": 0.9223300970873787,\n \"acc_norm_stderr\": 0.026501440784762752\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9658119658119658,\n \"acc_stderr\": 0.011904341997629818,\n \"acc_norm\": 0.9658119658119658,\n \"acc_norm_stderr\": 0.011904341997629818\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.030151134457776348,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.030151134457776348\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.946360153256705,\n \"acc_stderr\": 0.00805691182236487,\n \"acc_norm\": 0.946360153256705,\n \"acc_norm_stderr\": 0.00805691182236487\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.8410404624277457,\n \"acc_stderr\": 0.019685307033571946,\n \"acc_norm\": 0.8410404624277457,\n \"acc_norm_stderr\": 0.019685307033571946\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.8502793296089386,\n \"acc_stderr\": 0.011933090460111657,\n \"acc_norm\": 0.8502793296089386,\n \"acc_norm_stderr\": 0.011933090460111657\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.9019607843137255,\n \"acc_stderr\": 0.01702722293558219,\n \"acc_norm\": 0.9019607843137255,\n \"acc_norm_stderr\": 0.01702722293558219\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.887459807073955,\n \"acc_stderr\": 0.017949292186800647,\n \"acc_norm\": 0.887459807073955,\n \"acc_norm_stderr\": 0.017949292186800647\n },\n 
\"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.9228395061728395,\n \"acc_stderr\": 0.014847704893944928,\n \"acc_norm\": 0.9228395061728395,\n \"acc_norm_stderr\": 0.014847704893944928\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.7588652482269503,\n \"acc_stderr\": 0.02551873104953777,\n \"acc_norm\": 0.7588652482269503,\n \"acc_norm_stderr\": 0.02551873104953777\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.7672750977835724,\n \"acc_stderr\": 0.01079259555388848,\n \"acc_norm\": 0.7672750977835724,\n \"acc_norm_stderr\": 0.01079259555388848\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.9301470588235294,\n \"acc_stderr\": 0.015484012441056329,\n \"acc_norm\": 0.9301470588235294,\n \"acc_norm_stderr\": 0.015484012441056329\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.8774509803921569,\n \"acc_stderr\": 0.013266175773054252,\n \"acc_norm\": 0.8774509803921569,\n \"acc_norm_stderr\": 0.013266175773054252\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.03831305140884601,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.03831305140884601\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8857142857142857,\n \"acc_stderr\": 0.020367976491952145,\n \"acc_norm\": 0.8857142857142857,\n \"acc_norm_stderr\": 0.020367976491952145\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.9303482587064676,\n \"acc_stderr\": 0.018000052253856254,\n \"acc_norm\": 0.9303482587064676,\n \"acc_norm_stderr\": 0.018000052253856254\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.96,\n \"acc_stderr\": 0.01969463855669321,\n \"acc_norm\": 0.96,\n \"acc_norm_stderr\": 0.01969463855669321\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.6506024096385542,\n \"acc_stderr\": 0.037117251907407514,\n \"acc_norm\": 0.6506024096385542,\n \"acc_norm_stderr\": 0.037117251907407514\n },\n 
\"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.9415204678362573,\n \"acc_stderr\": 0.017996678857280124,\n \"acc_norm\": 0.9415204678362573,\n \"acc_norm_stderr\": 0.017996678857280124\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.390452876376989,\n \"mc1_stderr\": 0.017078230743431448,\n \"mc2\": 0.5652174373687721,\n \"mc2_stderr\": 0.015479461186777867\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8074191002367798,\n \"acc_stderr\": 0.0110825388474919\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6322971948445792,\n \"acc_stderr\": 0.013281630503395482\n }\n}\n```", "repo_url": "https://huggingface.co/AA051612/A0124", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|arc:challenge|25_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|gsm8k|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hellaswag|10_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-19-16.198603.parquet", 
"**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-19-16.198603.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-19-16.198603.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-19-16.198603.parquet", 
"**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-19-16.198603.parquet", 
"**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-19-16.198603.parquet", 
"**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T14-19-16.198603.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-19-16.198603.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": 
["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-19-16.198603.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T14-19-16.198603.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["**/details_harness|winogrande|5_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T14-19-16.198603.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T14_19_16.198603", "path": ["results_2024-01-25T14-19-16.198603.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T14-19-16.198603.parquet"]}]}]}
2024-01-25T14:22:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of AA051612/A0124 Dataset automatically created during the evaluation run of model AA051612/A0124 on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T14:19:16.198603(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of AA051612/A0124\n\n\n\nDataset automatically created during the evaluation run of model AA051612/A0124 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T14:19:16.198603(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of AA051612/A0124\n\n\n\nDataset automatically created during the evaluation run of model AA051612/A0124 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T14:19:16.198603(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 175, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of AA051612/A0124\n\n\n\nDataset automatically created during the evaluation run of model AA051612/A0124 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T14:19:16.198603(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.06498223543167114, 0.18116164207458496, -0.006472502835094929, 0.038245491683483124, 0.06027189642190933, 0.008626262657344341, 0.07211518287658691, 0.11018988490104675, 0.030486704781651497, 0.1528155654668808, 0.008805003948509693, 0.09742318093776703, 0.1114877313375473, 0.12626656889915466, 0.0036613657139241695, -0.13742907345294952, 0.0575476735830307, -0.08003979921340942, 0.09881710261106491, 0.07252790778875351, 0.08256617188453674, -0.08018606901168823, 0.06116179749369621, -0.030455946922302246, 0.0055761514231562614, -0.007761258631944656, -0.04620238393545151, -0.04897155612707138, 0.1070985198020935, 0.0719723179936409, 0.04271916672587395, -0.021272247657179832, 0.029706981033086777, -0.27857333421707153, 0.02266714908182621, 0.10361912846565247, -0.003423255868256092, 0.061279091984033585, 0.14514513313770294, -0.08464323729276657, 0.10028994828462601, -0.023735569790005684, 0.07536070793867111, 0.05022764578461647, -0.13544195890426636, -0.16368649899959564, -0.16142363846302032, 0.014366980642080307, 0.0720144659280777, 0.04367201402783394, -0.023110177367925644, 0.10023929923772812, -0.03258582204580307, 0.046002913266420364, 0.14361584186553955, -0.17919017374515533, -0.023414209485054016, 0.05062193423509598, 0.021020082756876945, 0.04278915375471115, -0.0881761834025383, -0.035535778850317, 0.0349186547100544, 0.05415143445134163, 0.01288091391324997, 0.012445000000298023, 0.06705259531736374, 0.013745707459747791, -0.13987816870212555, -0.12921251356601715, 0.1172683835029602, -0.019262105226516724, -0.05153367668390274, -0.15996524691581726, -0.055804137140512466, -0.032778650522232056, 0.017667166888713837, 0.023877941071987152, 0.017002200707793236, 0.005374673753976822, 0.0661616176366806, 0.0013021110789850354, -0.09393061697483063, -0.04342269524931908, -0.0581553652882576, 0.014125091023743153, 0.034877363592386246, 0.004020357504487038, -0.0026940300595015287, 0.14592979848384857, 0.024378512054681778, -0.0709131583571434, 
-0.10209713876247406, -0.04222646728157997, -0.12455909699201584, -0.04099272936582565, 0.01394741516560316, -0.040706533938646317, 0.043672382831573486, 0.2518312335014343, -0.06163591518998146, 0.03237273544073105, -0.08092856407165527, 0.01954093761742115, 0.11350780725479126, 0.0805739015340805, -0.047832369804382324, -0.06196046993136406, -0.04158861190080643, 0.027466662228107452, 0.028044091537594795, -0.032767657190561295, 0.029602374881505966, 0.06320952624082565, 0.03513336181640625, 0.12987861037254333, 0.11939458549022675, 0.019213959574699402, -0.07255131751298904, -0.011484171263873577, 0.123356394469738, -0.18315087258815765, 0.003739801002666354, 0.022154012694954872, -0.03461340069770813, -0.08327459543943405, 0.06075037270784378, -0.02329912781715393, -0.07381249964237213, 0.09686145186424255, -0.06698387861251831, -0.058703694492578506, -0.10741107910871506, -0.07176043838262558, 0.037442754954099655, -0.034838445484638214, -0.06357350945472717, -0.03797348216176033, -0.15191961824893951, -0.08814060688018799, 0.03238861635327339, -0.07495308667421341, -0.01644321344792843, 0.0009816911770030856, 0.027021553367376328, -0.023560602217912674, -0.005669598467648029, 0.09806288778781891, -0.07093746960163116, 0.04191320389509201, -0.036322783678770065, 0.04267897456884384, 0.084983229637146, 0.033961035311222076, -0.13765592873096466, 0.09745954722166061, -0.07249648123979568, 0.1053362712264061, -0.06644216179847717, -0.02911643497645855, -0.11931180953979492, 0.009909669868648052, -0.009307746775448322, 0.01825678162276745, -0.002040733117610216, 0.08997879922389984, -0.2287931740283966, -0.006916534621268511, 0.17234496772289276, -0.13999128341674805, -0.0753343477845192, 0.05243999883532524, -0.034261517226696014, 0.05489204823970795, 0.049837421625852585, 0.06686994433403015, 0.07031747698783875, -0.0512382909655571, -0.10225366801023483, -0.06873265653848648, -0.04125728830695152, 0.1389671415090561, 0.06638740003108978, -0.08609924465417862, 
0.09485384076833725, 0.02271752804517746, 0.0066904351115226746, -0.06802719086408615, -0.017453104257583618, -0.06070293113589287, 0.0006556377047672868, -0.008282647468149662, -0.11029292643070221, -0.025204315781593323, -0.10847500711679459, -0.010864379815757275, -0.06013169884681702, 0.005824308842420578, 0.09354399144649506, -0.031464584171772, 0.02600955404341221, -0.09235255420207977, 0.07584401220083237, -0.014843489043414593, 0.01832057535648346, -0.20784813165664673, -0.08306223154067993, 0.02240971475839615, -0.1644381284713745, 0.050583939999341965, 0.009228191338479519, 0.008296738378703594, 0.04854920133948326, 0.0015113473637029529, 0.017442891374230385, 0.02456429973244667, -0.016161710023880005, -0.0017759059555828571, -0.16656477749347687, -0.03357900306582451, -0.06707627326250076, 0.08564640581607819, -0.11913340538740158, -0.014015370048582554, 0.10460791736841202, 0.14143331348896027, 0.006130005232989788, -0.07740854471921921, 0.037031278014183044, 0.014601296745240688, -0.06873784959316254, -0.06398115307092667, 0.010239786468446255, 0.005611920263618231, 0.0220456775277853, 0.0781734511256218, -0.20601411163806915, -0.16858026385307312, 0.09053812175989151, 0.11214597523212433, -0.07821757346391678, -0.09477010369300842, -0.06258809566497803, -0.05110466480255127, -0.10334274917840958, -0.03976256772875786, 0.080177441239357, 0.0846378430724144, 0.06019896641373634, -0.06982196867465973, -0.06266611069440842, -0.010921869426965714, 0.030910437926650047, -0.075093574821949, 0.09600356221199036, 0.08398492634296417, -0.09963531792163849, 0.1060151755809784, -0.012445521540939808, 0.11397617310285568, 0.09881910681724548, 0.0022542455699294806, -0.1162722110748291, -0.018980026245117188, 0.0629739984869957, 0.04303630441427231, 0.08295925706624985, -0.0029151099734008312, 0.050273310393095016, 0.08264706283807755, 0.004516890272498131, 0.030114011839032173, -0.08913228660821915, 0.032643552869558334, 0.045773569494485855, 
-0.009342574514448643, -0.017100993543863297, -0.0027968056965619326, 0.026479845866560936, 0.092876136302948, 0.012639787979424, 0.06400356441736221, -0.009300320409238338, -0.04909970238804817, -0.09698078036308289, 0.1584055870771408, -0.09068500250577927, -0.20191329717636108, -0.16287429630756378, -0.07238934934139252, -0.016513772308826447, -0.0028321347199380398, 0.05078843608498573, 0.01092164684087038, -0.09465128928422928, -0.11101553589105606, 0.041317109018564224, 0.031742773950099945, -0.10947112739086151, -0.04469484090805054, 0.01347749400883913, 0.005367185454815626, -0.16466547548770905, 0.021254515275359154, 0.02860710397362709, -0.07864060252904892, 0.024915337562561035, 0.08432350307703018, 0.11008306592702866, 0.1145869567990303, 0.06014498323202133, -0.013791915960609913, -0.014098158106207848, 0.18237115442752838, -0.10454405844211578, 0.02111322619020939, 0.10465773940086365, -0.057396382093429565, 0.0829068124294281, 0.1557319015264511, 0.006574610713869333, -0.08762937039136887, 0.03132465481758118, 0.09024950116872787, -0.06538283824920654, -0.27792444825172424, -0.053064022213220596, -0.026805724948644638, 0.05300506576895714, 0.11760398000478745, 0.07681768387556076, -0.015271022915840149, 0.02824196219444275, -0.11768441647291183, -0.03655993193387985, -0.025445466861128807, 0.07424047589302063, 0.06342363357543945, -0.0024704139214009047, 0.055645957589149475, -0.06135448068380356, 0.04449651762843132, 0.12624678015708923, 0.025776168331503868, 0.18444764614105225, -0.056015320122241974, 0.18248069286346436, 0.11041653901338577, 0.0723540410399437, -0.013807391747832298, 0.08581576496362686, -0.0223704781383276, 0.07137167453765869, -0.013607315719127655, -0.08468564599752426, -0.03361910209059715, 0.08914245665073395, 0.012756009586155415, -0.014277013950049877, 0.04779370129108429, -0.05489902198314667, 0.05804038047790527, 0.259368896484375, 0.023061852902173996, -0.1813710480928421, -0.029850974678993225, 0.04608521983027458, 
-0.03801058977842331, -0.10984314978122711, -0.005408324301242828, 0.06654824316501617, -0.14785648882389069, 0.052330415695905685, -0.03394865244626999, 0.08238949626684189, -0.1568441540002823, -0.03628070279955864, 0.007255555596202612, 0.06779633462429047, -0.04461521655321121, 0.0986606553196907, -0.19513554871082306, 0.09894406050443649, -0.010969345457851887, 0.028518319129943848, -0.0708402618765831, 0.07337436079978943, -0.0012884517200291157, -0.07096557319164276, 0.13218745589256287, -0.0008575940155424178, -0.12410635501146317, -0.06386379897594452, -0.0988532081246376, -0.012011620216071606, 0.026658743619918823, -0.1029883399605751, 0.12204641848802567, -0.0017707300139591098, -0.005182011052966118, -0.047483112663030624, -0.010775420814752579, -0.06048502027988434, -0.2077820748090744, 0.08676496148109436, -0.10309551656246185, 0.061414822936058044, -0.050823796540498734, -0.037087518721818924, -0.03745005652308464, 0.14800313115119934, -0.161370649933815, -0.07491821050643921, -0.11126764118671417, -0.008464633487164974, 0.1239144429564476, -0.06770990043878555, 0.06179294362664223, -0.044026706367731094, 0.18118108808994293, -0.028574524447321892, -0.05177969112992287, 0.01727023348212242, -0.06549098342657089, -0.17793937027454376, -0.036861028522253036, 0.11834699660539627, 0.06409750133752823, 0.012036952190101147, 0.0021916539408266544, 0.07416612654924393, -0.010741199366748333, -0.09543605148792267, 0.051491741091012955, 0.10150934010744095, 0.10505734384059906, 0.05861821398139, -0.023619724437594414, -0.10534416139125824, -0.09072409570217133, -0.08297333866357803, 0.05992012843489647, 0.20332324504852295, -0.04236159846186638, 0.11462464928627014, 0.12035322189331055, -0.11318689584732056, -0.19876675307750702, -0.07388332486152649, -0.034363847225904465, -0.009904826991260052, 0.12022402882575989, -0.20295213162899017, 0.034202661365270615, 0.08909602463245392, -0.018801789730787277, 0.11793018132448196, -0.2374485731124878, 
-0.13121932744979858, 0.06944816559553146, 0.04763238877058029, -0.1473224014043808, -0.1165294274687767, -0.09451413154602051, -0.03031013160943985, -0.10891284048557281, 0.11079463362693787, -0.026215165853500366, 0.04229756072163582, -0.018862735480070114, 0.0527074933052063, 0.041990313678979874, -0.05265373736619949, 0.14961211383342743, -0.025048086419701576, 0.053020209074020386, -0.09364692866802216, -0.013325807638466358, -0.018303312361240387, -0.04700774326920509, 0.06237995624542236, -0.007112325634807348, 0.03633268177509308, -0.034429073333740234, -0.05247741565108299, -0.026091966778039932, 0.031900808215141296, -0.07279520481824875, -0.05957590788602829, -0.06360077112913132, 0.06552514433860779, 0.06539663672447205, -0.022035783156752586, 0.013909758068621159, -0.060396548360586166, 0.05657831206917763, 0.2217491865158081, 0.08199238777160645, 0.06597316265106201, -0.1033342257142067, -0.04218870773911476, -0.0046213106252253056, 0.013472194783389568, -0.09763595461845398, 0.05041513219475746, 0.09105801582336426, 0.03545878082513809, 0.11627361178398132, 0.006556802429258823, -0.2050280123949051, -0.0037887608632445335, 0.0791141614317894, -0.1001398041844368, -0.18040141463279724, 0.049327507615089417, 0.046315502375364304, -0.12583968043327332, -0.08819275349378586, 0.08057012408971786, 0.030428288504481316, -0.016056878492236137, 0.022097185254096985, 0.07666477560997009, 0.03555683419108391, 0.09732773154973984, -0.03087557479739189, 0.052455734461545944, -0.08537887036800385, 0.1288137137889862, 0.13633859157562256, -0.1128150001168251, -0.0033449740149080753, 0.06795370578765869, -0.03721516579389572, -0.05426948517560959, -0.04074757918715477, 0.05733072757720947, -0.023003336042165756, -0.05369932949542999, -0.02947583980858326, -0.08765324205160141, 0.08198672533035278, 0.15054208040237427, 0.0025401622988283634, 0.09063281118869781, 0.024058587849140167, -0.01303945668041706, -0.053062696009874344, 0.1161547601222992, 
0.028404289856553078, 0.039849527180194855, -0.019259298220276833, 0.043587833642959595, 0.01871572807431221, -0.014573418535292149, 0.020576687529683113, -0.06403427571058273, -0.04495469853281975, 0.014477980323135853, -0.16310422122478485, 0.029877446591854095, -0.08525637537240982, -0.01768643409013748, 0.010108046233654022, 0.004763900768011808, -0.0004564268165268004, 0.019336886703968048, -0.04780491814017296, -0.04813085496425629, -0.05358685553073883, 0.11269381642341614, -0.20636817812919617, 0.0071023814380168915, 0.08138780295848846, -0.08383392542600632, 0.08472198992967606, -0.011487302370369434, -0.02230238728225231, -0.00575623894110322, -0.08410052955150604, -0.00916530005633831, -0.030228983610868454, 0.029985379427671432, 0.011576955206692219, -0.16306711733341217, -0.014118410646915436, 0.007170873694121838, -0.11197910457849503, -0.0032776538282632828, 0.007851074449717999, -0.14024890959262848, 0.06273385882377625, 0.09840043634176254, -0.04198608547449112, -0.03458951786160469, 0.03186053782701492, 0.047344330698251724, 0.017265476286411285, 0.10413643717765808, -0.02189044840633869, 0.03824963793158531, -0.1663537323474884, -0.03037077933549881, 0.00829344056546688, 0.0015051589580252767, 0.02582179196178913, 0.006702662445604801, 0.03975633904337883, -0.02155294641852379, 0.2328381985425949, -0.020555350929498672, -0.0476096011698246, 0.0360867977142334, 0.024730736389756203, -0.06616602838039398, 0.03366928920149803, 0.023383673280477524, 0.004673093091696501, 0.009218762628734112, 0.008456171490252018, -0.020438557490706444, -0.04920895770192146, 0.029500124976038933, 0.10684755444526672, 0.10147117078304291, 0.23390412330627441, -0.038498565554618835, 0.03502340987324715, -0.1399565488100052, -0.047957342118024826, 0.011123226955533028, -0.043486956506967545, 0.04254281148314476, -0.04434526711702347, 0.060470100492239, 0.13458047807216644, -0.13562145829200745, 0.12834283709526062, -0.04719451442360878, -0.025378039106726646, 
-0.0829015001654625, -0.19897598028182983, -0.03627670183777809, 0.019203923642635345, -0.0009726673015393317, -0.09303683042526245, 0.10460728406906128, 0.16238656640052795, 0.012003082782030106, -0.0074151307344436646, 0.05987213924527168, -0.11503904312849045, -0.06284985691308975, -0.04395579174160957, 0.030905315652489662, 0.03941668942570686, 0.015770575031638145, 0.04701393470168114, 0.01979437842965126, 0.057294122874736786, 0.08823346346616745, 0.08636872470378876, 0.09133052080869675, 0.058128681033849716, -0.036208271980285645, -0.052366361021995544, 0.0027901630382984877, -0.024705717340111732, -0.03821196034550667, 0.172247976064682, 0.05305205658078194, 0.03528902679681778, 0.02706916071474552, 0.20784203708171844, -0.007218469865620136, -0.06888480484485626, -0.1274917721748352, 0.1641479879617691, -0.0009103143820539117, 0.016723783686757088, 0.030946923419833183, -0.12636838853359222, 0.029394691810011864, 0.13319112360477448, 0.09480544924736023, 0.015543879941105843, 0.010513692162930965, 0.028801916167140007, 0.02212776429951191, -0.02479376085102558, 0.018985571339726448, 0.029320282861590385, 0.19168895483016968, -0.05000936612486839, 0.09343563765287399, -0.03130846470594406, -0.014376124367117882, -0.017909886315464973, 0.11280260235071182, -0.04451232776045799, 0.02317390777170658, -0.06727071106433868, 0.0753939226269722, -0.04561075195670128, -0.22439630329608917, 0.0310797281563282, -0.03540223836898804, -0.1285577267408371, -0.001812175614759326, 0.03951924666762352, -0.019735949113965034, 0.03539024293422699, 0.03472853824496269, -0.013032474555075169, 0.17543654143810272, 0.009688884019851685, -0.06689652800559998, -0.09524552524089813, 0.08058827370405197, -0.04884733632206917, 0.2837468981742859, -0.0005761007778346539, 0.02882629819214344, 0.0842304602265358, -0.0009577361634001136, -0.14215277135372162, 0.05352466553449631, 0.08004118502140045, -0.05202658846974373, 0.043752238154411316, 0.1184585765004158, -0.01267128437757492, 
0.11236388981342316, 0.059122126549482346, -0.00048505185986869037, 0.07053396850824356, 0.03361644968390465, -0.006994048599153757, -0.07715509086847305, 0.04803186282515526, -0.07874666899442673, 0.11863558739423752, 0.13553006947040558, -0.02370520867407322, 0.010424649342894554, -0.05514011159539223, 0.041118595749139786, -0.040204379707574844, 0.0874374583363533, 0.010053852573037148, -0.13230076432228088, 0.07018798589706421, 0.03923213854432106, 0.06666833907365799, -0.20236586034297943, -0.0741497203707695, 0.08563385158777237, -0.05849850922822952, -0.032811250537633896, 0.09234341233968735, 0.03784411400556564, 0.03492789715528488, -0.06211486831307411, -0.08716443181037903, 0.020680250599980354, 0.09807975590229034, -0.053848907351493835, -0.05033571645617485 ]
6ce552c3e350e222da21a65f044554d72393c415
this is a customized version of the [The Movies Dataset](https://www.kaggle.com/datasets/rounakbanik/the-movies-dataset)
not-lain/movies
[ "size_categories:10K<n<100K", "license:cc0-1.0", "region:us" ]
2024-01-25T14:24:09+00:00
{"license": "cc0-1.0", "size_categories": ["10K<n<100K"]}
2024-01-29T13:18:52+00:00
[]
[]
TAGS #size_categories-10K<n<100K #license-cc0-1.0 #region-us
this is a customized version of the The Movies Dataset
[]
[ "TAGS\n#size_categories-10K<n<100K #license-cc0-1.0 #region-us \n" ]
[ 26 ]
[ "passage: TAGS\n#size_categories-10K<n<100K #license-cc0-1.0 #region-us \n" ]
[ -0.06437023729085922, 0.031103570014238358, -0.002562063978984952, 0.026393204927444458, 0.048278044909238815, 0.05247299745678902, 0.1628117859363556, 0.08891388773918152, 0.19482281804084778, 0.023326771333813667, 0.13960379362106323, 0.05126321688294411, 0.05158315598964691, 0.14092527329921722, -0.06262454390525818, -0.2472933977842331, 0.06383553892374039, 0.028136758133769035, 0.10443150252103806, 0.041699156165122986, 0.0344228558242321, -0.11672624945640564, 0.02099740505218506, -0.07244233787059784, -0.10605526715517044, -0.010576914995908737, 0.04769761860370636, -0.07885042577981949, 0.043358512222766876, -0.07692785561084747, 0.09898713231086731, 0.0811784416437149, 0.045028313994407654, -0.19080041348934174, -0.015410144813358784, -0.07192640751600266, -0.09235456585884094, 0.05600964277982712, 0.04959480091929436, 0.0806543380022049, 0.015724139288067818, -0.07346167415380478, -0.05084729194641113, 0.0663342997431755, -0.14935335516929626, -0.1933920830488205, -0.11555806547403336, 0.07355707138776779, 0.03678279370069504, 0.019745850935578346, 0.03712591156363487, 0.11507058143615723, -0.19977544248104095, -0.01792318932712078, 0.16096873581409454, -0.4035186767578125, 0.08415105938911438, 0.3278721570968628, 0.03578045591711998, 0.08345860987901688, -0.07171569764614105, 0.02241837978363037, 0.1336347758769989, -0.04311280697584152, -0.012994390912353992, -0.05039103701710701, -0.01790497452020645, 0.07020927220582962, -0.016602955758571625, -0.04672859609127045, 0.3327217996120453, 0.10236425697803497, -0.00254239933565259, -0.016106976196169853, -0.021824581548571587, -0.2451191246509552, -0.049210213124752045, 0.11936482042074203, 0.09118105471134186, 0.09500694274902344, 0.033867280930280685, 0.07009420543909073, -0.12897589802742004, -0.05879141017794609, -0.23051850497722626, 0.09595416486263275, -0.023676849901676178, 0.09135833382606506, -0.06400489807128906, -0.01062670536339283, -0.3352784514427185, -0.05747128278017044, 
-0.04461025446653366, -0.051183681935071945, -0.018548142164945602, -0.025328895077109337, -0.04965784400701523, 0.17593571543693542, 0.09963680803775787, 0.12078733742237091, 0.0011346508981660008, 0.0008441066020168364, 0.0608489029109478, 0.12913623452186584, -0.02353651449084282, 0.11099554598331451, 0.02009294554591179, 0.03213284909725189, 0.003035941394045949, -0.08960463106632233, 0.09405805915594101, -0.024178629741072655, -0.20065441727638245, -0.11923033744096756, -0.0692855641245842, 0.11569881439208984, -0.09152652323246002, -0.027653012424707413, -0.06280791759490967, 0.07628871500492096, 0.1276521384716034, -0.004094863776117563, 0.012304761447012424, 0.014907123520970345, 0.0014211758971214294, 0.02805846929550171, -0.15672658383846283, -0.011468479409813881, 0.10824848711490631, 0.13051925599575043, -0.1304682344198227, -0.015148337930440903, 0.0008205053163692355, -0.051179178059101105, 0.127007856965065, -0.16346506774425507, 0.017058096826076508, -0.12637877464294434, -0.12145819514989853, 0.049690213054418564, -0.031537722796201706, -0.03132430836558342, 0.04244425147771835, 0.03433381766080856, -0.051075391471385956, -0.051851991564035416, -0.05232471600174904, -0.07481782883405685, -0.0777597427368164, 0.08594519644975662, -0.03376856818795204, 0.09531378000974655, -0.26460763812065125, -0.03954209014773369, -0.11623305827379227, 0.06688369810581207, -0.0580710731446743, -0.07467179000377655, -0.09036023169755936, 0.16127513349056244, -0.037638477981090546, -0.03625348582863808, -0.1558728814125061, -0.039009906351566315, -0.03296273201704025, 0.07085549086332321, -0.2425791174173355, -0.0203928891569376, 0.131629079580307, -0.03638775646686554, -0.0959373340010643, -0.0014770935522392392, 0.03060534968972206, -0.0267667006701231, 0.08423536270856857, 0.33671584725379944, -0.054806068539619446, -0.041162941604852676, -0.04050010070204735, 0.20066210627555847, -0.11808113008737564, -0.354496568441391, 0.13466958701610565, -0.17056281864643097, 
-0.05619249492883682, 0.027028458192944527, 0.04216018691658974, 0.004013492725789547, -0.028461428359150887, -0.052834589034318924, -0.047965288162231445, -0.04763302579522133, 0.07534018903970718, 0.006975346244871616, 0.08524398505687714, -0.06897309422492981, 0.04913830757141113, -0.0015589555259793997, 0.019229475408792496, 0.08819835633039474, -0.0027457932010293007, -0.03782930225133896, 0.11112643778324127, -0.10054221749305725, -0.02056507021188736, -0.07445396482944489, 0.009827218018472195, 0.08638881146907806, -0.06741927564144135, 0.03676644340157509, 0.19532158970832825, 0.025297824293375015, -0.05413399264216423, -0.025244206190109253, 0.11143080145120621, 0.03574005886912346, 0.06594366580247879, -0.08129376918077469, 0.009455263614654541, 0.005782879423350096, 0.008654143661260605, -0.05287168174982071, -0.03482750430703163, 0.01363807637244463, 0.20721130073070526, -0.07099267840385437, -0.01808864437043667, 0.07527504116296768, -0.027155913412570953, 0.016378946602344513, 0.022133223712444305, -0.027776213362812996, 0.09133844822645187, -0.0081625459715724, -0.12484858185052872, 0.06866448372602463, -0.11078489571809769, 0.09663745760917664, 0.19870024919509888, 0.008840754628181458, 0.028521884232759476, -0.11900674551725388, 0.002975632669404149, 0.023660358041524887, 0.02463429793715477, -0.017843572422862053, -0.08680109679698944, -0.006032161880284548, 0.06759341806173325, -0.0754682719707489, -0.0296362042427063, 0.020867904648184776, -0.03332428261637688, -0.039303336292505264, 0.011671819724142551, 0.24882163107395172, -0.127066507935524, 0.14564956724643707, 0.3834472894668579, 0.03704020753502846, 0.059347499161958694, -0.12197543680667877, -0.04071380943059921, -0.06437350809574127, 0.00856426078826189, -0.03377165272831917, 0.1693257987499237, -0.04771184176206589, -0.001572148990817368, 0.06819091737270355, 0.04506855085492134, 0.060985010117292404, -0.16486530005931854, -0.09971928596496582, -0.022462792694568634, 
-0.058404143899679184, -0.18697451055049896, 0.05486810579895973, -0.012654264457523823, 0.04241742193698883, 0.06510811299085617, -0.07188490033149719, 0.11001136898994446, -0.02981802448630333, -0.042449332773685455, 0.1279667764902115, -0.12124505639076233, -0.042800307273864746, -0.14449982345104218, 0.02283814735710621, 0.023997187614440918, 0.06786778569221497, 0.03108092211186886, -0.0917290672659874, -0.042170632630586624, -0.01919163018465042, -0.06592536717653275, -0.11563118547201157, 0.018125636503100395, 0.005589107517153025, 0.09917661547660828, -0.09038273990154266, -0.10163634270429611, -0.0732136145234108, -0.03753664717078209, 0.030581790953874588, 0.06583190709352493, -0.11437681317329407, 0.07043811678886414, 0.27044814825057983, -0.05218709260225296, 0.05518387630581856, -0.06126189976930618, 0.0892859697341919, -0.005483055952936411, -0.008982487954199314, 0.06678477674722672, 0.10544735938310623, 0.03979412838816643, 0.2627376914024353, 0.1082996055483818, -0.09594587981700897, -0.03514588996767998, -0.07535263150930405, -0.14922000467777252, -0.17218713462352753, -0.04875818267464638, -0.11870495229959488, 0.10139257460832596, 0.1052638366818428, 0.07579700648784637, 0.06266367435455322, 0.10152502357959747, 0.050373759120702744, 0.09414078295230865, -0.038851626217365265, 0.07089395076036453, 0.2115548551082611, 0.001999122090637684, 0.004459595773369074, -0.15076957643032074, 0.00999080017209053, 0.12175345420837402, 0.10679468512535095, 0.22628512978553772, 0.25167953968048096, 0.2527265250682831, 0.0614858977496624, 0.08074008673429489, 0.15826351940631866, 0.12775546312332153, 0.10928811877965927, -0.024740375578403473, -0.03171325847506523, -0.03304193541407585, 0.09456183761358261, 0.022166162729263306, 0.03937716409564018, -0.13884201645851135, -0.04410906881093979, -0.2131284773349762, 0.041023168712854385, 0.003886388847604394, 0.2087520956993103, -0.16512253880500793, 0.08407556265592575, 0.06897668540477753, 0.06691128760576248, 
-0.028437403962016106, 0.10411570221185684, 0.042401961982250214, -0.03403538838028908, 0.0448731854557991, 0.018081102520227432, 0.07060254365205765, -0.005213384050875902, 0.0398871973156929, -0.022149892523884773, -0.10330858826637268, 0.02286255545914173, 0.11305728554725647, -0.15088053047657013, 0.2837642729282379, 0.04072735458612442, -0.02769746072590351, -0.09025998413562775, -0.08873394131660461, -0.022947244346141815, 0.07837788015604019, 0.06632505357265472, 0.04547800123691559, -0.14054231345653534, -0.18433347344398499, -0.10010354965925217, -0.036277420818805695, 0.08442138880491257, -0.012467112392187119, -0.06735024601221085, -0.0144032659009099, 0.024595938622951508, 0.01179592963308096, 0.0019302308792248368, -0.06213228777050972, -0.058009933680295944, -0.011418815702199936, 0.19132502377033234, -0.06423462927341461, -0.04547994211316109, 0.047569453716278076, -0.07722152024507523, 0.04328598827123642, -0.20523087680339813, -0.027851607650518417, -0.059111159294843674, -0.13382798433303833, 0.13144446909427643, -0.03439498692750931, 0.04760419577360153, -0.050352249294519424, -0.036064326763153076, -0.10679168254137039, -0.12655922770500183, 0.11856425553560257, -0.021986395120620728, -0.05096116289496422, -0.04470682516694069, 0.13481248915195465, -0.15879668295383453, 0.06756909191608429, 0.018945368006825447, 0.048861097544431686, -0.001928951358422637, -0.14646273851394653, 0.01902102306485176, -0.06245351582765579, 0.07576889544725418, 0.061642907559871674, 0.01824922487139702, 0.017191121354699135, 0.06052368879318237, -0.018920304253697395, 0.1582942008972168, 0.3618565797805786, -0.11949922889471054, 0.0971960574388504, 0.22198623418807983, 0.01014439295977354, -0.21531203389167786, -0.048471953719854355, -0.2389160543680191, -0.06390304118394852, 0.017901066690683365, 0.0012592405546456575, 0.0594334602355957, 0.20039524137973785, -0.08708438277244568, 0.23312664031982422, -0.2347453385591507, -0.08328911662101746, 0.10983189195394516, 
-0.031733524054288864, 0.40246862173080444, -0.12312784045934677, -0.12318175286054611, -0.13212387263774872, -0.1757497489452362, 0.11153468489646912, -0.0035519362427294254, 0.10174719244241714, -0.07343903928995132, -0.010432351380586624, -0.00296113477088511, -0.05055471882224083, 0.21546949446201324, 0.09981057047843933, 0.11030637472867966, -0.07900168001651764, -0.23528514802455902, 0.28221040964126587, -0.01938186027109623, 0.053303275257349014, -0.08915258944034576, -0.051234666258096695, -0.010861567221581936, 0.005080484319478273, -0.00889293011277914, 0.03618679195642471, 0.0042856414802372456, -0.08648672699928284, -0.14841079711914062, -0.019565589725971222, -0.10605910420417786, -0.04709189012646675, 0.20160003006458282, 0.008844440802931786, -0.03167341649532318, 0.08173216134309769, 0.0005321244243532419, -0.07657596468925476, -0.014772973023355007, 0.010534530505537987, -0.08944110572338104, 0.07447902113199234, -0.19927921891212463, -0.029020046815276146, 0.14036640524864197, 0.015539705753326416, 0.035421326756477356, 0.09670481830835342, -0.04742386192083359, -0.011311234906315804, 0.17517104744911194, -0.016665596514940262, -0.018952064216136932, 0.06750421971082687, -0.027833763509988785, 0.008908369578421116, 0.0396384559571743, -0.09157303720712662, 0.0012747581349685788, 0.03593190014362335, 0.002227537799626589, 0.0013968980638310313, -0.09427010267972946, 0.03617521747946739, 0.08664445579051971, 0.04661887511610985, -0.12818104028701782, 0.17419666051864624, 0.05799813196063042, -0.0009089390514418483, -0.11111370474100113, 0.05096781253814697, -0.13602182269096375, -0.05731714516878128, -0.013272726908326149, -0.05808306857943535, -0.08583787828683853, -0.06982510536909103, 0.0018438746919855475, -0.15611925721168518, -0.004246755503118038, 0.06470578163862228, 0.05929725244641304, 0.1255849152803421, 0.03778575360774994, -0.0524849034845829, 0.007479272782802582, -0.0662115290760994, -0.1528739035129547, 0.0780227929353714, 
-0.13790184259414673, -0.025154078379273415, -0.03301873058080673, 0.0322825126349926, -0.026174338534474373, 0.017069745808839798, -0.13408246636390686, 0.027473993599414825, -0.10796979069709778, 0.05514121800661087, -0.11009125411510468, -0.04182421788573265, 0.01849593035876751, 0.025721045210957527, -0.0493474155664444, -0.04371325671672821, -0.14513105154037476, 0.010232552886009216, -0.012248953804373741, 0.06353593617677689, 0.0069202552549541, -0.020972641184926033, 0.029454903677105904, 0.009817549027502537, 0.07649905979633331, -0.007047533057630062, 0.055379923433065414, 0.09784805029630661, -0.07402648031711578, -0.037078987807035446, 0.12792488932609558, 0.01884640008211136, 0.02943635731935501, 0.12714067101478577, -0.022160576656460762, 0.048889048397541046, 0.05116334930062294, 0.06617315113544464, -0.12823261320590973, -0.14800980687141418, -0.09782468527555466, -0.08408938348293304, -0.14261594414710999, -0.012856962159276009, -0.11740335077047348, 0.20221251249313354, -0.003696694504469633, 0.1185087338089943, 0.05462491512298584, -0.00979509949684143, -0.055888134986162186, -0.02374790981411934, -0.05135812610387802, -0.08199524879455566, -0.0818309560418129, -0.025260526686906815, 0.0008774787420406938, 0.018231265246868134, 0.3789287805557251, 0.018146920949220657, -0.1545863151550293, 0.021648505702614784, 0.12906098365783691, 0.03396914154291153, -0.004621470347046852, 0.4643048644065857, 0.11051163822412491, -0.06745180487632751, -0.1309731900691986, 0.07508727163076401, -0.0012720536906272173, -0.15191979706287384, 0.1598144918680191, 0.1106484979391098, -0.09662842750549316, 0.07408001273870468, 0.09599026292562485, -0.09820311516523361, 0.03180976212024689, 0.10398157685995102, 0.05662659928202629, -0.0025909224059432745, 0.0044464897364377975, 0.06899288296699524, 0.14629939198493958, -0.1052621528506279, 0.008532760664820671, -0.043777938932180405, -0.031504906713962555, -0.1657995581626892, -0.2066074013710022, -0.04728600010275841, 
-0.13590051233768463, 0.055110517889261246, 0.02389182709157467, 0.013491061516106129, 0.2488022893667221, -0.009025058709084988, -0.012665117159485817, -0.010223128832876682, -0.21902309358119965, -0.0157724991440773, 0.027912048622965813, -0.0030417959205806255, -0.04298436641693115, -0.0708942711353302, -0.07505511492490768, 0.01991858147084713, -0.19752250611782074, -0.006654817145317793, 0.023869303986430168, -0.010658993385732174, -0.03730415180325508, -0.05802370235323906, -0.04495396465063095, -0.05022541806101799, 0.03460659086704254, 0.012873242609202862, 0.1693125218153, 0.006723763421177864, -0.020241880789399147, 0.029631154611706734, 0.04301946237683296, -0.04846520721912384, -0.14734245836734772, 0.063105009496212, 0.11073318868875504, 0.053627099841833115, 0.08693669736385345, -0.05358026176691055, -0.13057254254817963, 0.010170421563088894, 0.1613873541355133, 0.23370513319969177, 0.022104907780885696, -0.008770110085606575, -0.0267772376537323, -0.005574603099375963, 0.04246079921722412, 0.16649027168750763, -0.024543773382902145, 0.14510293304920197, -0.031159639358520508, 0.024930693209171295, 0.007263816427439451, 0.03184593841433525, -0.136636421084404, 0.06474752724170685, 0.026380013674497604, -0.10410038381814957, -0.12412647902965546, 0.12645186483860016, -0.033340130001306534, 0.2006198614835739, 0.22493226826190948, -0.10726853460073471, 0.0285380519926548, 0.006691027898341417, 0.020779786631464958, 0.0047211796045303345, 0.0692061185836792, -0.1529437005519867, -0.05900571867823601, -0.16324405372142792, -0.0038301495369523764, -0.366066575050354, -0.09375772625207901, 0.026500850915908813, 0.11708612740039825, 0.15236999094486237, 0.0006040235166437924, 0.19129706919193268, 0.07584353536367416, 0.059261154383420944, -0.0854436382651329, 0.2369573414325714, 0.022265875712037086, -0.009827369824051857, -0.02472001314163208, -0.09695006161928177, -0.04484465345740318, -0.10371306538581848, 0.03912179544568062, -0.09755769371986389, 
0.025285061448812485, 0.17692600190639496, -0.03449543938040733, -0.12499570101499557, -0.040242355316877365, -0.0714082419872284, 0.09651610255241394, -0.03353477269411087, -0.051530513912439346, -0.08033189177513123, 0.007830614224076271, -0.010996154509484768, 0.12988325953483582, -0.21894407272338867, -0.07277990877628326, 0.10448898375034332, 0.010345268994569778, 0.14675040543079376, 0.003351048333570361, -0.10149387270212173, 0.031269606202840805, -0.11014512181282043, 0.048128318041563034, -0.1406070441007614, 0.030840380117297173, 0.11609066277742386, -0.013758029788732529, -0.005003648344427347, -0.2564679980278015, -0.0064535378478467464, 0.04302262142300606, -0.15742169320583344, -0.10222182422876358 ]
449573f53267291c78b2fbad102b7866e02637e9
# Commonsense QA CoT (Partial, Raw, No Human Annotation) ## Dataset Summary Seeded by the CommonsenseQA dataset (tau/commonsense_qa) this preliminary set randomly samples 1,000 question-answer entries and uses Mixtral (mistralai/Mixtral-8x7B-Instruct-v0.1) to generate 3 unique CoT (Chain-of-Thought) rationales. This was created as the preliminary step towards fine-tuning a LM (language model) to specialize on commonsense reasoning. The working hypothesis, inspired by the research papers listed below, is that a diverse set of CoT rationales passed along with the CommonsenseQA question-answer choices will provide accelerated commonsense reasoning performance on even a relatively small model (<3B parameters). Additional refinement and annotations to this dataset are to follow. Background research and inspiration from the following papers: CommonsenseQA: A Question Answering Challenge Targeting Commonsense Knowledge (https://arxiv.org/abs/1811.00937) Chain-of-Thought Prompting Elicits Reasoning in Large Language Models (https://arxiv.org/abs/2201.11903) Specializing Smaller Language Models towards Multi-Step Reasoning (https://arxiv.org/abs/2301.12726) Orca 2: Teaching Small Language Models How to Reason (https://arxiv.org/abs/2311.11045) Large Language Models Are Reasoning Teachers (https://arxiv.org/abs/2212.10071) Teaching Small Language Models to Reason (https://arxiv.org/abs/2212.08410) ## Dataset Structure ### Languages The dataset is in English (`en`). ### Data Fields - `id` (`str`): Unique ID. - `question`: a `string` feature. - `question_concept` (`str`): ConceptNet concept associated to the question. - `choices`: a dictionary feature containing: - `label`: a `string` feature. - `text`: a `string` feature. - `answerKey`: a `string` feature. - `response1`: a `string` feature. - `response2`: a `string` feature. - `response3`: a `string` feature. 
### Data Example ``` {'id': '1fe48d12b6f6e4e38f4445f3ec60d5c5', 'question': 'What can happen to someone too sure of their learning?', 'question_concept': 'learning', 'choices': {'label': ['A', 'B', 'C', 'D', 'E'], 'text': ['growth', 'gaining knowledge', 'enlightenment', 'knowing more', 'overconfidence']}, 'answerKey': 'E', 'response 1': 'When someone is too sure of their learning, they become ' 'overconfident, thinking that they know everything. This can ' 'prevent them from learning more, as they stop seeking new ' 'knowledge and ideas. They might also miss out on ' 'enlightenment, as they close themselves off to new ' 'perspectives. Overall, their growth might be stunted, as they ' 'stop challenging themselves and expanding their ' 'understanding. So, out of the given choices, the most ' 'appropriate answer is overconfidence.', 'response 2': 'Overconfidence can lead someone to fail to question their own ' 'preconceptions, in turn hindering further learning and ' 'receptiveness to new information. On the other hand, growth, ' 'gaining knowledge, enlightenment, and knowing more would all ' 'be positive consequences of learning. Therefore, the most ' 'accurate answer would be overconfidence.', 'response 3': 'Knowing more and gaining knowledge come with learning, so ' 'those are probably not the answers. Growth is a natural ' 'response to learning, and one should always be open to ' "enlightenment, so those can't be the answers either. " 'Overconfidence may be the best answer - being too sure of ' "one's learning can lead to arrogance and a lack of " 'willingness to learn more. (Source: English Consonant ' 'Spelling)'} ``` ### Source Data - **Data:** https://huggingface.co/datasets/tau/commonsense_qa - **Homepage:** https://www.tau-nlp.org/commonsenseqa - **Repository:** https://github.com/jonathanherzig/commonsenseqa - **Paper:** https://arxiv.org/abs/1811.00937 ### Licensing Information The dataset is licensed under the MIT License.
peterkchung/commonsense_cot_partial_raw
[ "task_categories:question-answering", "size_categories:n<1K", "language:en", "license:mit", "arxiv:1811.00937", "arxiv:2201.11903", "arxiv:2301.12726", "arxiv:2311.11045", "arxiv:2212.10071", "arxiv:2212.08410", "region:us" ]
2024-01-25T14:26:35+00:00
{"language": ["en"], "license": "mit", "size_categories": ["n<1K"], "task_categories": ["question-answering"], "pretty_name": "CommonsenseQA CoT - Partial list, Raw entries, No Human Annotation", "dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "question_concept", "dtype": "string"}, {"name": "choices", "struct": [{"name": "label", "sequence": "string"}, {"name": "text", "sequence": "string"}]}, {"name": "answerKey", "dtype": "string"}, {"name": "response 1", "dtype": "string"}, {"name": "response 2", "dtype": "string"}, {"name": "response 3", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1364962, "num_examples": 1000}], "download_size": 823371, "dataset_size": 1364962}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-01-26T19:06:40+00:00
[ "1811.00937", "2201.11903", "2301.12726", "2311.11045", "2212.10071", "2212.08410" ]
[ "en" ]
TAGS #task_categories-question-answering #size_categories-n<1K #language-English #license-mit #arxiv-1811.00937 #arxiv-2201.11903 #arxiv-2301.12726 #arxiv-2311.11045 #arxiv-2212.10071 #arxiv-2212.08410 #region-us
# Commonsense QA CoT (Partial, Raw, No Human Annotation) ## Dataset Summary Seeded by the CommonsenseQA dataset (tau/commonsense_qa) this preliminary set randomly samples 1,000 question-answer entries and uses Mixtral (mistralai/Mixtral-8x7B-Instruct-v0.1) to generate 3 unique CoT (Chain-of-Thought) rationales. This was created as the preliminary step towards fine-tuning a LM (language model) to specialize on commonsense reasoning. The working hypothesis, inspired by the research papers listed below, is that a diverse set of CoT rationales passed along with the CommonsenseQA question-answer choices will provide accelerated commonsense reasoning performance on even a relatively small model (<3B parameters). Additional refinement and annotations to this dataset are to follow. Background research and inspiration from the following papers: CommonsenseQA: A Question Answering Challenge Targeting Commonsense Knowledge (URL Chain-of-Thought Prompting Elicits Reasoning in Large Language Models (URL Specializing Smaller Language Models towards Multi-Step Reasoning (URL Orca 2: Teaching Small Language Models How to Reason (URL Large Language Models Are Reasoning Teachers (URL Teaching Small Language Models to Reason (URL ## Dataset Structure ### Languages The dataset is in English ('en'). ### Data Fields - 'id' ('str'): Unique ID. - 'question': a 'string' feature. - 'question_concept' ('str'): ConceptNet concept associated to the question. - 'choices': a dictionary feature containing: - 'label': a 'string' feature. - 'text': a 'string' feature. - 'answerKey': a 'string' feature. - 'response1': a 'string' feature. - 'response2': a 'string' feature. - 'response3': a 'string' feature. ### Data Example ### Source Data - Data: URL - Homepage: URL - Repository: URL - Paper: URL ### Licensing Information The dataset is licensed under the MIT License.
[ "# Commonsense QA CoT (Partial, Raw, No Human Annotation)", "## Dataset Summary\n\nSeeded by the CommonsenseQA dataset (tau/commonsense_qa) this preliminary set randomly samples 1,000 question-answer\nentries and uses Mixtral (mistralai/Mixtral-8x7B-Instruct-v0.1) to generate 3 unique CoT (Chain-of-Thought) rationales.\nThis was created as the preliminary step towards fine-tuning a LM (language model) to specialize on commonsense reasoning.\n\nThe working hypothesis, inspired by the research papers listed below, is that a diverse set of CoT rationales passed along\nwith the CommonsenseQA question-answer choices will provide accelerated commonsense reasoning performance on even a relatively \nsmall model (<3B parameters).\n\nAdditional refinement and annotations to this dataset are to follow.\n\nBackground research and inspiration from the following papers: \n\nCommonsenseQA: A Question Answering Challenge Targeting Commonsense Knowledge (URL \nChain-of-Thought Prompting Elicits Reasoning in Large Language Models (URL \nSpecializing Smaller Language Models towards Multi-Step Reasoning (URL \nOrca 2: Teaching Small Language Models How to Reason (URL \nLarge Language Models Are Reasoning Teachers (URL \nTeaching Small Language Models to Reason (URL", "## Dataset Structure", "### Languages\n\nThe dataset is in English ('en').", "### Data Fields\n\n- 'id' ('str'): Unique ID.\n- 'question': a 'string' feature.\n- 'question_concept' ('str'): ConceptNet concept associated to the question.\n- 'choices': a dictionary feature containing:\n - 'label': a 'string' feature.\n - 'text': a 'string' feature.\n- 'answerKey': a 'string' feature.\n- 'response1': a 'string' feature.\n- 'response2': a 'string' feature.\n- 'response3': a 'string' feature.", "### Data Example", "### Source Data\n\n- Data: URL\n- Homepage: URL\n- Repository: URL\n- Paper: URL", "### Licensing Information\n\nThe dataset is licensed under the MIT License." ]
[ "TAGS\n#task_categories-question-answering #size_categories-n<1K #language-English #license-mit #arxiv-1811.00937 #arxiv-2201.11903 #arxiv-2301.12726 #arxiv-2311.11045 #arxiv-2212.10071 #arxiv-2212.08410 #region-us \n", "# Commonsense QA CoT (Partial, Raw, No Human Annotation)", "## Dataset Summary\n\nSeeded by the CommonsenseQA dataset (tau/commonsense_qa) this preliminary set randomly samples 1,000 question-answer\nentries and uses Mixtral (mistralai/Mixtral-8x7B-Instruct-v0.1) to generate 3 unique CoT (Chain-of-Thought) rationales.\nThis was created as the preliminary step towards fine-tuning a LM (language model) to specialize on commonsense reasoning.\n\nThe working hypothesis, inspired by the research papers listed below, is that a diverse set of CoT rationales passed along\nwith the CommonsenseQA question-answer choices will provide accelerated commonsense reasoning performance on even a relatively \nsmall model (<3B parameters).\n\nAdditional refinement and annotations to this dataset are to follow.\n\nBackground research and inspiration from the following papers: \n\nCommonsenseQA: A Question Answering Challenge Targeting Commonsense Knowledge (URL \nChain-of-Thought Prompting Elicits Reasoning in Large Language Models (URL \nSpecializing Smaller Language Models towards Multi-Step Reasoning (URL \nOrca 2: Teaching Small Language Models How to Reason (URL \nLarge Language Models Are Reasoning Teachers (URL \nTeaching Small Language Models to Reason (URL", "## Dataset Structure", "### Languages\n\nThe dataset is in English ('en').", "### Data Fields\n\n- 'id' ('str'): Unique ID.\n- 'question': a 'string' feature.\n- 'question_concept' ('str'): ConceptNet concept associated to the question.\n- 'choices': a dictionary feature containing:\n - 'label': a 'string' feature.\n - 'text': a 'string' feature.\n- 'answerKey': a 'string' feature.\n- 'response1': a 'string' feature.\n- 'response2': a 'string' feature.\n- 'response3': a 'string' feature.", "### Data 
Example", "### Source Data\n\n- Data: URL\n- Homepage: URL\n- Repository: URL\n- Paper: URL", "### Licensing Information\n\nThe dataset is licensed under the MIT License." ]
[ 91, 18, 287, 6, 15, 140, 5, 22, 17 ]
[ "passage: TAGS\n#task_categories-question-answering #size_categories-n<1K #language-English #license-mit #arxiv-1811.00937 #arxiv-2201.11903 #arxiv-2301.12726 #arxiv-2311.11045 #arxiv-2212.10071 #arxiv-2212.08410 #region-us \n# Commonsense QA CoT (Partial, Raw, No Human Annotation)## Dataset Summary\n\nSeeded by the CommonsenseQA dataset (tau/commonsense_qa) this preliminary set randomly samples 1,000 question-answer\nentries and uses Mixtral (mistralai/Mixtral-8x7B-Instruct-v0.1) to generate 3 unique CoT (Chain-of-Thought) rationales.\nThis was created as the preliminary step towards fine-tuning a LM (language model) to specialize on commonsense reasoning.\n\nThe working hypothesis, inspired by the research papers listed below, is that a diverse set of CoT rationales passed along\nwith the CommonsenseQA question-answer choices will provide accelerated commonsense reasoning performance on even a relatively \nsmall model (<3B parameters).\n\nAdditional refinement and annotations to this dataset are to follow.\n\nBackground research and inspiration from the following papers: \n\nCommonsenseQA: A Question Answering Challenge Targeting Commonsense Knowledge (URL \nChain-of-Thought Prompting Elicits Reasoning in Large Language Models (URL \nSpecializing Smaller Language Models towards Multi-Step Reasoning (URL \nOrca 2: Teaching Small Language Models How to Reason (URL \nLarge Language Models Are Reasoning Teachers (URL \nTeaching Small Language Models to Reason (URL## Dataset Structure### Languages\n\nThe dataset is in English ('en')." ]
[ -0.07210171222686768, 0.04149377718567848, -0.0035955135244876146, 0.08238227665424347, 0.002251775236800313, 0.03993324935436249, 0.06668248772621155, 0.0635126605629921, -0.020211298018693924, 0.022234885022044182, 0.017615200951695442, 0.01955721341073513, 0.08508235961198807, -0.04834659770131111, 0.06392096728086472, -0.28307661414146423, 0.046141475439071655, -0.07153879851102829, 0.07021720707416534, 0.07458221167325974, 0.08991413563489914, -0.03766582906246185, 0.07471100240945816, 0.05583599954843521, 0.06957506388425827, -0.00964331068098545, -0.020505286753177643, -0.03950652480125427, 0.11071213334798813, 0.07197389006614685, -0.05451268330216408, 0.02939439006149769, 0.03726742044091225, -0.12671145796775818, 0.00927349179983139, -0.006245806813240051, 0.042090799659490585, 0.04602661356329918, 0.06725937873125076, -0.061459653079509735, 0.2142980992794037, 0.0713588148355484, 0.024166572839021683, 0.1042923778295517, -0.10261757671833038, -0.11358675360679626, -0.04494614526629448, 0.028227942064404488, -0.03215150162577629, 0.06362524628639221, -0.03165429085493088, 0.037446316331624985, -0.07095956802368164, 0.04145834222435951, 0.13104405999183655, -0.05863603577017784, 0.005932141095399857, 0.06930432468652725, 0.0010366166243329644, 0.08718268573284149, -0.03031475655734539, -0.040538571774959564, 0.013213220983743668, 0.036540787667036057, 0.028370752930641174, -0.05612845718860626, -0.0730966329574585, -0.03417902812361717, -0.08210080116987228, -0.00917653925716877, 0.10161078721284866, -0.006531266961246729, -0.07678590714931488, -0.11358681321144104, -0.0145086869597435, 0.15521351993083954, -0.032274916768074036, -0.034957583993673325, 0.04821622744202614, 0.01911291293799877, 0.19985242187976837, -0.09521811455488205, -0.08157715946435928, 0.024390261620283127, -0.05980510637164116, 0.020439064130187035, -0.012493273243308067, 0.05388418585062027, 0.005201528314501047, 0.05173207074403763, -0.09866790473461151, -0.07986059039831161, 
-0.10770445317029953, -0.0826590359210968, 0.046828191727399826, -0.027474166825413704, -0.0001188351379823871, 0.09407954663038254, 0.010460092686116695, 0.0968853309750557, -0.13768218457698822, 0.040004413574934006, 0.014841567724943161, 0.013779803179204464, 0.09840278327465057, -0.030193721875548363, -0.12510962784290314, 0.0032602231949567795, 0.03509051725268364, 0.07478293776512146, -0.0339505597949028, -0.010755416005849838, -0.04134340584278107, -0.025340650230646133, -0.06279927492141724, 0.012256653979420662, 0.004758751951158047, -0.00021703180391341448, -0.08481995761394501, -0.028825579211115837, 0.11637992411851883, -0.03915266692638397, -0.03675062209367752, 0.011236798949539661, -0.05540302395820618, 0.011703429743647575, -0.0019069175468757749, 0.07948794215917587, -0.050701338797807693, 0.010241302661597729, -0.07926905900239944, -0.030586715787649155, -0.08300797641277313, -0.07908761501312256, 0.025578368455171585, -0.0030385153368115425, 0.008652950637042522, -0.06296262890100479, -0.25825726985931396, -0.0700748935341835, 0.05128523334860802, -0.07617077231407166, -0.010781800374388695, -0.044325441122055054, -0.09487522393465042, -0.02344755455851555, 0.01230376586318016, 0.05958087369799614, -0.05605514720082283, -0.0165802501142025, -0.08089938759803772, 0.06863974779844284, -0.10251729190349579, 0.007735176477581263, -0.18231309950351715, 0.011154635809361935, -0.127924844622612, 0.14011092483997345, -0.06929606944322586, -0.0470905676484108, -0.07734781503677368, -0.03463944420218468, -0.05522648245096207, 0.061508893966674805, 0.027942506596446037, 0.1947021186351776, -0.18106691539287567, -0.0003127656236756593, 0.04668431356549263, -0.09578337520360947, -0.11132322251796722, 0.09424052387475967, -0.05109703913331032, 0.28462257981300354, 0.06882509589195251, 0.15499159693717957, -0.016309089958667755, 0.119722381234169, -0.06859735399484634, -0.011940469965338707, 0.03673727437853813, 0.13089828193187714, 0.044112615287303925, 
-0.0035779017489403486, -0.010379902087152004, 0.031974002718925476, 0.01588411070406437, -0.0467674657702446, -0.025341080501675606, -0.06051249802112579, 0.017698580399155617, -0.03971517086029053, -0.012260744348168373, -0.007072003558278084, -0.03980522230267525, -0.016004759818315506, -0.06790315359830856, -0.010130343027412891, 0.05592324584722519, -0.010755046270787716, -0.016298608854413033, -0.10565337538719177, -0.03561157360672951, -0.02669728919863701, -0.01187540777027607, -0.17372353374958038, -0.01971503347158432, -0.009406493976712227, 0.04430181533098221, 0.07103003561496735, 0.017255712300539017, 0.01670372113585472, -0.005050497129559517, -0.049843598157167435, 0.029029710218310356, -0.06338056176900864, -0.004414910450577736, -0.07397199422121048, -0.19412043690681458, -0.006848562974482775, -0.04903840273618698, 0.07847171276807785, -0.09791919589042664, 0.008134482428431511, 0.052559301257133484, 0.0052341013215482235, 0.05762960761785507, -0.03569300100207329, 0.03081202320754528, 0.0001582994154887274, -0.041218727827072144, -0.010576368309557438, 0.041418734937906265, -0.026897836476564407, 0.008921667002141476, 0.10695727169513702, -0.07734880596399307, -0.13663692772388458, -0.00550685403868556, 0.05818905308842659, -0.03852379322052002, -0.054738420993089676, -0.03900124877691269, -0.013122341595590115, -0.043705616146326065, -0.08863304555416107, 0.08116048574447632, 0.006253953091800213, -0.017929600551724434, -0.13552291691303253, -0.008070200681686401, -0.002623678417876363, -0.03353232517838478, 0.04584219679236412, 0.12833945453166962, 0.1279771775007248, -0.12053634971380234, 0.06278418749570847, -0.06718284636735916, 0.06366562843322754, 0.08507423847913742, 0.022211000323295593, -0.0631708949804306, -0.02088404819369316, -0.07951486110687256, -0.012960045598447323, 0.11024525761604309, -0.017883246764540672, 0.04002091661095619, 0.06466394662857056, 0.05231587588787079, 0.04296417161822319, -0.07795567810535431, 
0.018852274864912033, 0.013686158694326878, -0.019352920353412628, -0.07226697355508804, 0.014359625987708569, -0.006864059716463089, 0.11169403791427612, -0.05759467929601669, 0.022649792954325676, -0.04814758896827698, -0.017015036195516586, -0.16235299408435822, 0.15319184958934784, -0.033901482820510864, -0.14231055974960327, -0.07849925756454468, 0.041280802339315414, -0.016823112964630127, -0.01757355034351349, 0.029892124235630035, 0.04156873747706413, -0.04909002408385277, -0.16908414661884308, 0.05433858186006546, 0.015773123130202293, -0.0955517515540123, -0.1354249119758606, 0.01184016466140747, -0.019360952079296112, -0.09438395500183105, -0.02474825084209442, -0.02542944997549057, -0.026410074904561043, -0.012057917192578316, -0.02386992983520031, 0.03178985044360161, 0.08866605162620544, 0.05740867927670479, -0.06687659025192261, -0.049723654985427856, 0.36625996232032776, -0.016724005341529846, 0.1685425490140915, 0.10353685915470123, -0.05634256452322006, 0.07123694568872452, 0.18680673837661743, 0.039265502244234085, -0.11392829567193985, 0.042527373880147934, 0.06752542406320572, -0.09178230166435242, -0.18567200005054474, -0.0035031959414482117, -0.06363386660814285, 0.002275839913636446, 0.019666923210024834, -0.000013024038707953878, 0.02067459188401699, 0.10754428803920746, -0.04083175212144852, -0.021784381940960884, -0.03418600559234619, 0.007416377775371075, 0.05736233666539192, 0.033327627927064896, 0.06853628903627396, -0.024218710139393806, 0.028781650587916374, 0.0587298721075058, 0.016872588545084, 0.1481543481349945, -0.08115179091691971, 0.13017520308494568, 0.0738055408000946, 0.06246490404009819, 0.014836844988167286, 0.12836046516895294, -0.09541800618171692, 0.020237116143107414, -0.05636424943804741, -0.037009097635746, -0.06019360572099686, 0.12849882245063782, -0.06440854072570801, 0.015142817981541157, -0.01912301778793335, 0.05939778313040733, 0.013221574947237968, 0.28207072615623474, 0.044061582535505295, 
-0.1705956906080246, -0.08032874017953873, 0.071502685546875, -0.016194472089409828, -0.07857823371887207, 0.011145747266709805, 0.10661846399307251, -0.0770529955625534, 0.0018796180374920368, 0.02166173607110977, 0.08576728403568268, -0.08385968208312988, 0.009163404814898968, -0.1305917352437973, 0.03122246265411377, -0.02859359234571457, 0.0855940654873848, -0.1097443625330925, 0.07496016472578049, 0.022120457142591476, 0.08040840923786163, -0.14435185492038727, -0.00411803787574172, 0.03298872336745262, -0.03949705511331558, 0.16586878895759583, 0.025057146325707436, -0.05208240821957588, -0.02481682039797306, -0.08158964663743973, 0.05555654689669609, -0.008392359130084515, -0.1225079596042633, 0.12075165659189224, 0.018377656117081642, 0.06337742507457733, -0.0037243065889924765, 0.05802902951836586, -0.15801623463630676, -0.18446338176727295, 0.06086154282093048, -0.03008144348859787, -0.008214157074689865, -0.032675113528966904, -0.01520182192325592, 0.01417956780642271, 0.029431357979774475, -0.10450757294893265, -0.1350494623184204, -0.10101915150880814, 0.010030497796833515, 0.10735707730054855, -0.02613012306392193, -0.00045882046106271446, 0.029837176203727722, 0.038158636540174484, -0.01702427677810192, -0.04563158378005028, 0.09007230401039124, -0.10870809853076935, -0.14750410616397858, -0.02252948470413685, 0.057836856693029404, 0.1574358493089676, 0.08437848091125488, 0.013257999904453754, 0.02912868559360504, 0.03298043832182884, -0.09893494844436646, 0.017312852665781975, 0.14943939447402954, -0.019513046368956566, 0.007097410503774881, -0.037006132304668427, 0.033686570823192596, -0.03661005571484566, -0.036103203892707825, 0.058455172926187515, 0.14230526983737946, -0.028896359726786613, 0.15283358097076416, 0.1653520166873932, -0.09063729643821716, -0.1827421337366104, 0.08452951163053513, 0.043387822806835175, 0.018065311014652252, 0.14185279607772827, -0.19863195717334747, 0.12815366685390472, 0.042979683727025986, 0.000547899748198688, 
-0.003965683281421661, -0.22964000701904297, -0.11244577914476395, -0.017473800107836723, 0.016910022124648094, 0.11786559224128723, -0.047776494175195694, -0.028802460059523582, 0.007616985123604536, -0.09694954752922058, 0.0974447950720787, 0.06506505608558655, 0.028529759496450424, 0.02671460062265396, 0.08358312398195267, 0.050270359963178635, -0.02051129750907421, 0.15033037960529327, -0.014000743627548218, 0.02579483389854431, -0.023308169096708298, -0.08082330226898193, 0.10712951421737671, -0.002450900385156274, 0.13570809364318848, 0.03159203380346298, 0.018791576847434044, -0.07010257244110107, -0.07979968190193176, -0.08774673938751221, 0.04538022726774216, -0.039708394557237625, -0.10120604932308197, -0.06851713359355927, 0.11649201065301895, 0.09410937875509262, 0.022347504273056984, -0.08911582827568054, -0.13878726959228516, -0.009794515557587147, 0.17295831441879272, 0.3059904873371124, -0.08770863711833954, 0.05389910563826561, 0.0558415912091732, -0.014179471880197525, 0.04986819997429848, -0.14810368418693542, 0.02001885697245598, 0.13052856922149658, -0.022517843171954155, 0.11778665333986282, 0.0413343608379364, -0.15170074999332428, 0.020512539893388748, 0.05245480686426163, -0.04660754278302193, -0.27687302231788635, -0.0034177612978965044, 0.1447913497686386, -0.1882575899362564, -0.04938531666994095, 0.16219648718833923, -0.06832147389650345, -0.03323807194828987, 0.006262949202209711, 0.11925403773784637, 0.02367435023188591, 0.0652996376156807, -0.0007573586772195995, 0.05400026589632034, -0.07335390895605087, 0.04430833086371422, 0.02203279174864292, -0.0906648337841034, 0.013453096151351929, 0.085226871073246, -0.08201590925455093, -0.025387484580278397, -0.0973331406712532, -0.03655634820461273, -0.09396645426750183, -0.05996314063668251, 0.03142917528748512, -0.0999271497130394, 0.02931247651576996, 0.052809953689575195, -0.0028350420761853456, -0.013726250268518925, -0.0417957603931427, 0.004299149848520756, -0.010867049917578697, 
0.0544816330075264, 0.009291078895330429, 0.02353265881538391, 0.011116701178252697, -0.06560122966766357, -0.013407999649643898, -0.02810112200677395, -0.01702117547392845, 0.015469414182007313, -0.07139114290475845, 0.002494745422154665, -0.2107822746038437, 0.029501991346478462, -0.1459542214870453, -0.024590175598859787, 0.02164655178785324, -0.0064455377869307995, -0.04263067990541458, 0.02565986104309559, -0.0909508541226387, 0.006072598043829203, -0.023958437144756317, 0.09713773429393768, -0.06147674843668938, 0.04362265765666962, 0.07972220331430435, -0.061533860862255096, 0.0968475490808487, 0.009073509834706783, -0.07354544848203659, -0.02983999066054821, -0.06063557043671608, 0.08128448575735092, -0.0034340298734605312, 0.06570694595575333, -0.007525429129600525, -0.12596817314624786, -0.02821439877152443, -0.0028866983484476805, -0.03951601684093475, -0.03489458188414574, -0.004348219837993383, -0.03332294896245003, 0.14591138064861298, 0.08053958415985107, -0.02501715160906315, -0.06570834666490555, -0.006229077465832233, 0.010470717214047909, -0.020656676962971687, 0.08501186966896057, 0.06007340922951698, 0.004957393277436495, -0.10825823992490768, 0.005663502961397171, 0.0009016050025820732, 0.035214610397815704, 0.014724953100085258, -0.07177981734275818, 0.05057568475604057, -0.012587461620569229, 0.16042624413967133, -0.019549453631043434, -0.04241521656513214, 0.014611542224884033, -0.01050319243222475, 0.11104397475719452, 0.0380689762532711, 0.0015012742951512337, 0.02044304460287094, -0.06288296729326248, 0.013818004168570042, 0.03394165262579918, -0.04222242534160614, 0.03547735512256622, 0.2254336029291153, 0.024894678965210915, 0.20308725535869598, 0.019656291231513023, 0.05245834216475487, 0.030697567388415337, 0.032113876193761826, 0.10622653365135193, 0.1307137906551361, -0.004830548074096441, -0.049582142382860184, 0.05286278948187828, 0.15267206728458405, -0.07566215842962265, 0.10093124210834503, -0.04820529744029045, 
-0.01237762812525034, -0.10219736397266388, -0.20417886972427368, -0.03424288332462311, 0.002519897883757949, -0.0659400001168251, -0.15724480152130127, -0.06790905445814133, 0.17481717467308044, 0.017504917457699776, 0.024403061717748642, 0.08656717836856842, -0.0527220256626606, -0.08556288480758667, 0.05721971020102501, -0.01402635034173727, 0.014107713475823402, -0.07500309497117996, -0.0016278927214443684, 0.015137697570025921, 0.0645662173628807, 0.0036271673161536455, 0.06588521599769592, -0.04569043591618538, -0.07545404881238937, -0.022140463814139366, -0.03875255212187767, -0.03677141293883324, 0.020631803199648857, -0.03740498423576355, 0.11392300575971603, 0.08509145677089691, -0.014397627674043179, 0.045229535549879074, 0.14129653573036194, -0.03564387932419777, -0.042536940425634384, -0.12282655388116837, 0.0869513601064682, -0.08357331156730652, 0.06991476565599442, 0.05114079266786575, -0.09845813363790512, 0.05297194421291351, 0.1573459953069687, 0.13195332884788513, -0.1545356661081314, -0.04318893700838089, -0.029756713658571243, 0.003742754692211747, -0.048910971730947495, 0.03775113821029663, 0.0039801751263439655, 0.2632809579372406, -0.11510880291461945, 0.07533209025859833, -0.03867208585143089, -0.05767959728837013, 0.06979228556156158, 0.14841927587985992, 0.010626625269651413, 0.03556213155388832, -0.07064144313335419, 0.0920979306101799, -0.16275450587272644, -0.2091444879770279, -0.025424066931009293, 0.0032519735395908356, -0.06016930937767029, 0.010427498258650303, -0.1360260248184204, 0.02759169600903988, 0.08420584350824356, -0.04231735318899155, -0.0253286175429821, 0.005940479226410389, 0.04410974308848381, -0.042332280427217484, 0.030636735260486603, 0.13143694400787354, -0.012948031537234783, 0.14085529744625092, -0.0024650061968714, 0.06274019181728363, 0.12017977982759476, 0.0012638319749385118, -0.17919452488422394, -0.0646752342581749, 0.06110750883817673, 0.010039662010967731, -0.014075939543545246, 0.08535723388195038, 
0.0584181472659111, 0.10260400921106339, 0.0947783961892128, -0.1379729062318802, 0.06483865529298782, 0.19505935907363892, -0.018714435398578644, -0.0497472770512104, 0.006499369628727436, -0.10122694820165634, 0.11870281398296356, 0.044011086225509644, -0.04506189003586769, -0.015591111034154892, 0.014102702960371971, 0.05126121640205383, 0.025168446823954582, 0.029234865680336952, -0.0032839893829077482, -0.17731694877147675, 0.027563532814383507, 0.07490131258964539, 0.03551360219717026, -0.1969202160835266, -0.06657959520816803, 0.03906739503145218, -0.004591393284499645, 0.005556834861636162, 0.04242995008826256, 0.08751189708709717, -0.009163873270154, -0.025790002197027206, -0.11684565991163254, -0.03607315942645073, 0.1377699375152588, -0.04235624149441719, -0.05535581335425377 ]
fd9ec625f9e59aac5189039adced67cace5cea71
## Source dataset created from https://huggingface.co/datasets/huggan/wikiart ## Task Find two images that have the same genre ## Prompt: ``` The most common method for classifying art paintings is by genre (or theme). The term “genre” refers to the type of image that serves as the subject of a painting. The genre of a painting is classified as a cityscape, landscape, nude painting, portrait, religious painting, sketch and study, or still life. Given the four images of art paintings, can you tell which two of them have the same genre? Select between the following choices. (A) ... (B) ... (C) ... (D) ... ``` --- license: apache-2.0 dataset_info: features: - name: idx dtype: int32 - name: image1 dtype: image - name: image2 dtype: image - name: image3 dtype: image - name: image4 dtype: image - name: choices sequence: string - name: image1_genre dtype: class_label: names: '0': abstract_painting '1': cityscape '2': genre_painting '3': illustration '4': landscape '5': nude_painting '6': portrait '7': religious_painting '8': sketch_and_study '9': still_life - name: image2_genre dtype: class_label: names: '0': abstract_painting '1': cityscape '2': genre_painting '3': illustration '4': landscape '5': nude_painting '6': portrait '7': religious_painting '8': sketch_and_study '9': still_life - name: image3_genre dtype: class_label: names: '0': abstract_painting '1': cityscape '2': genre_painting '3': illustration '4': landscape '5': nude_painting '6': portrait '7': religious_painting '8': sketch_and_study '9': still_life - name: image4_genre dtype: class_label: names: '0': abstract_painting '1': cityscape '2': genre_painting '3': illustration '4': landscape '5': nude_painting '6': portrait '7': religious_painting '8': sketch_and_study '9': still_life - name: answer dtype: string splits: - name: test num_bytes: 486029299.0 num_examples: 300 download_size: 480861315 dataset_size: 486029299.0 configs: - config_name: default data_files: - split: test path: data/test-* ---
PerceptionEval/ArtGenre
[ "region:us" ]
2024-01-25T14:35:10+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "val", "path": "data/val-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "idx", "dtype": "int32"}, {"name": "image1", "dtype": "image"}, {"name": "image2", "dtype": "image"}, {"name": "image3", "dtype": "image"}, {"name": "image4", "dtype": "image"}, {"name": "choices", "sequence": "string"}, {"name": "image1_genre", "dtype": {"class_label": {"names": {"0": "abstract_painting", "1": "cityscape", "2": "genre_painting", "3": "illustration", "4": "landscape", "5": "nude_painting", "6": "portrait", "7": "religious_painting", "8": "sketch_and_study", "9": "still_life"}}}}, {"name": "image2_genre", "dtype": {"class_label": {"names": {"0": "abstract_painting", "1": "cityscape", "2": "genre_painting", "3": "illustration", "4": "landscape", "5": "nude_painting", "6": "portrait", "7": "religious_painting", "8": "sketch_and_study", "9": "still_life"}}}}, {"name": "image3_genre", "dtype": {"class_label": {"names": {"0": "abstract_painting", "1": "cityscape", "2": "genre_painting", "3": "illustration", "4": "landscape", "5": "nude_painting", "6": "portrait", "7": "religious_painting", "8": "sketch_and_study", "9": "still_life"}}}}, {"name": "image4_genre", "dtype": {"class_label": {"names": {"0": "abstract_painting", "1": "cityscape", "2": "genre_painting", "3": "illustration", "4": "landscape", "5": "nude_painting", "6": "portrait", "7": "religious_painting", "8": "sketch_and_study", "9": "still_life"}}}}, {"name": "answer", "dtype": "string"}], "splits": [{"name": "val", "num_bytes": 240278209.0, "num_examples": 150}, {"name": "test", "num_bytes": 245751090.0, "num_examples": 150}], "download_size": 480446228, "dataset_size": 486029299.0}}
2024-01-29T10:44:52+00:00
[]
[]
TAGS #region-us
## Source dataset created from URL ## Task Find two images that have the same genre ## Prompt: --- license: apache-2.0 dataset_info: features: - name: idx dtype: int32 - name: image1 dtype: image - name: image2 dtype: image - name: image3 dtype: image - name: image4 dtype: image - name: choices sequence: string - name: image1_genre dtype: class_label: names: '0': abstract_painting '1': cityscape '2': genre_painting '3': illustration '4': landscape '5': nude_painting '6': portrait '7': religious_painting '8': sketch_and_study '9': still_life - name: image2_genre dtype: class_label: names: '0': abstract_painting '1': cityscape '2': genre_painting '3': illustration '4': landscape '5': nude_painting '6': portrait '7': religious_painting '8': sketch_and_study '9': still_life - name: image3_genre dtype: class_label: names: '0': abstract_painting '1': cityscape '2': genre_painting '3': illustration '4': landscape '5': nude_painting '6': portrait '7': religious_painting '8': sketch_and_study '9': still_life - name: image4_genre dtype: class_label: names: '0': abstract_painting '1': cityscape '2': genre_painting '3': illustration '4': landscape '5': nude_painting '6': portrait '7': religious_painting '8': sketch_and_study '9': still_life - name: answer dtype: string splits: - name: test num_bytes: 486029299.0 num_examples: 300 download_size: 480861315 dataset_size: 486029299.0 configs: - config_name: default data_files: - split: test path: data/test-* ---
[ "## Source\ndataset created from URL", "## Task\nFind two images that have the same genre", "## Prompt: \n\n\n---\nlicense: apache-2.0\ndataset_info:\n features:\n - name: idx\n dtype: int32\n - name: image1\n dtype: image\n - name: image2\n dtype: image\n - name: image3\n dtype: image\n - name: image4\n dtype: image\n - name: choices\n sequence: string\n - name: image1_genre\n dtype:\n class_label:\n names:\n '0': abstract_painting\n '1': cityscape\n '2': genre_painting\n '3': illustration\n '4': landscape\n '5': nude_painting\n '6': portrait\n '7': religious_painting\n '8': sketch_and_study\n '9': still_life\n - name: image2_genre\n dtype:\n class_label:\n names:\n '0': abstract_painting\n '1': cityscape\n '2': genre_painting\n '3': illustration\n '4': landscape\n '5': nude_painting\n '6': portrait\n '7': religious_painting\n '8': sketch_and_study\n '9': still_life\n - name: image3_genre\n dtype:\n class_label:\n names:\n '0': abstract_painting\n '1': cityscape\n '2': genre_painting\n '3': illustration\n '4': landscape\n '5': nude_painting\n '6': portrait\n '7': religious_painting\n '8': sketch_and_study\n '9': still_life\n - name: image4_genre\n dtype:\n class_label:\n names:\n '0': abstract_painting\n '1': cityscape\n '2': genre_painting\n '3': illustration\n '4': landscape\n '5': nude_painting\n '6': portrait\n '7': religious_painting\n '8': sketch_and_study\n '9': still_life\n - name: answer\n dtype: string\n splits:\n - name: test\n num_bytes: 486029299.0\n num_examples: 300\n download_size: 480861315\n dataset_size: 486029299.0\nconfigs:\n- config_name: default\n data_files:\n - split: test\n path: data/test-*\n---" ]
[ "TAGS\n#region-us \n", "## Source\ndataset created from URL", "## Task\nFind two images that have the same genre", "## Prompt: \n\n\n---\nlicense: apache-2.0\ndataset_info:\n features:\n - name: idx\n dtype: int32\n - name: image1\n dtype: image\n - name: image2\n dtype: image\n - name: image3\n dtype: image\n - name: image4\n dtype: image\n - name: choices\n sequence: string\n - name: image1_genre\n dtype:\n class_label:\n names:\n '0': abstract_painting\n '1': cityscape\n '2': genre_painting\n '3': illustration\n '4': landscape\n '5': nude_painting\n '6': portrait\n '7': religious_painting\n '8': sketch_and_study\n '9': still_life\n - name: image2_genre\n dtype:\n class_label:\n names:\n '0': abstract_painting\n '1': cityscape\n '2': genre_painting\n '3': illustration\n '4': landscape\n '5': nude_painting\n '6': portrait\n '7': religious_painting\n '8': sketch_and_study\n '9': still_life\n - name: image3_genre\n dtype:\n class_label:\n names:\n '0': abstract_painting\n '1': cityscape\n '2': genre_painting\n '3': illustration\n '4': landscape\n '5': nude_painting\n '6': portrait\n '7': religious_painting\n '8': sketch_and_study\n '9': still_life\n - name: image4_genre\n dtype:\n class_label:\n names:\n '0': abstract_painting\n '1': cityscape\n '2': genre_painting\n '3': illustration\n '4': landscape\n '5': nude_painting\n '6': portrait\n '7': religious_painting\n '8': sketch_and_study\n '9': still_life\n - name: answer\n dtype: string\n splits:\n - name: test\n num_bytes: 486029299.0\n num_examples: 300\n download_size: 480861315\n dataset_size: 486029299.0\nconfigs:\n- config_name: default\n data_files:\n - split: test\n path: data/test-*\n---" ]
[ 6, 7, 11, 514 ]
[ "passage: TAGS\n#region-us \n## Source\ndataset created from URL## Task\nFind two images that have the same genre" ]
[ -0.027292096987366676, 0.15294097363948822, -0.0062897056341171265, 0.05908441171050072, 0.015829075127840042, 0.0769466906785965, 0.13307204842567444, 0.10314559936523438, 0.12072101980447769, -0.012323051691055298, 0.0019966261461377144, 0.03501565009355545, 0.017503567039966583, 0.02419964037835598, -0.017338231205940247, -0.18654368817806244, 0.02656475640833378, 0.06623236835002899, -0.06854910403490067, 0.02547614648938179, 0.03319217637181282, -0.07125968486070633, 0.06221625581383705, -0.06421279162168503, -0.18737338483333588, 0.07038596272468567, -0.04592631757259369, 0.040808502584695816, 0.04598596319556236, -0.0087827043607831, 0.18441282212734222, 0.050829555839300156, 0.07664940506219864, -0.08986920118331909, 0.028340322896838188, -0.008614730089902878, -0.10839955508708954, -0.031544797122478485, 0.21752725541591644, -0.27785244584083557, -0.10960117727518082, 0.09878327697515488, -0.042876631021499634, -0.027494745329022408, -0.192437544465065, 0.006839191075414419, -0.029332978650927544, 0.09351389855146408, 0.1503486931324005, -0.02572690136730671, 0.05412823706865311, 0.013996316120028496, -0.15155364573001862, 0.07742916792631149, 0.11745458096265793, -0.036972690373659134, -0.05060768127441406, 0.18700963258743286, -0.002683427883312106, 0.04141721874475479, -0.07597893476486206, 0.09100710600614548, -0.058280978351831436, -0.03704771399497986, -0.13296827673912048, -0.07138008624315262, -0.20372769236564636, 0.07718135416507721, -0.03827463835477829, -0.010249501094222069, 0.20142535865306854, 0.1047230213880539, 0.14214695990085602, 0.06248461827635765, -0.1521233469247818, 0.13359947502613068, -0.04376808926463127, 0.15494929254055023, -0.007313370704650879, 0.0708913803100586, -0.04393989220261574, -0.04909586161375046, -0.11333149671554565, -0.09078319370746613, -0.06499325484037399, -0.028778111562132835, -0.032534159719944, 0.1277463734149933, -0.14022181928157806, 0.01304074376821518, -0.04328048601746559, -0.10974150896072388, 
0.10422973334789276, -0.11547444015741348, -0.07435273379087448, 0.010995527729392052, -0.014103400520980358, -0.1856980174779892, 0.1428356021642685, 0.08250498026609421, 0.08596479147672653, 0.005978712812066078, -0.20274856686592102, 0.1990450769662857, 0.12125147134065628, 0.009278041310608387, -0.018907543271780014, -0.025960728526115417, -0.02577970363199711, -0.07918857783079147, -0.06432533264160156, -0.014665143564343452, -0.11149647831916809, -0.08357556164264679, -0.0799010694026947, 0.016332753002643585, 0.03944803401827812, -0.05707993730902672, -0.15115152299404144, -0.03614291921257973, 0.044657088816165924, -0.02316567488014698, 0.012650102376937866, 0.0430498942732811, -0.028032176196575165, 0.012078467756509781, -0.016883933916687965, 0.09822262078523636, 0.040625885128974915, 0.003918252885341644, -0.08851993829011917, 0.06687740981578827, -0.019512800499796867, -0.04772105813026428, 0.0382610484957695, -0.13633568584918976, 0.03089793212711811, -0.08898740261793137, -0.2519407570362091, -0.007700498681515455, 0.1289343386888504, -0.09539789706468582, 0.10739549994468689, -0.05568777024745941, 0.07312243431806564, -0.04069451987743378, 0.0569746159017086, -0.04534118250012398, -0.09445367008447647, 0.09052369743585587, -0.1596570461988449, 0.18867693841457367, -0.07019119709730148, -0.0020727987866848707, -0.13712288439273834, 0.023127662017941475, -0.1927260309457779, 0.14438976347446442, -0.07765579223632812, 0.2022503763437271, -0.012854224070906639, 0.0910370796918869, -0.1866607367992401, -0.016886577010154724, -0.05607745796442032, 0.1812799572944641, -0.05339619889855385, -0.05248003453016281, 0.2257688343524933, -0.03177710622549057, -0.04325297102332115, 0.017642712220549583, -0.05193068087100983, 0.053951237350702286, 0.05084818974137306, 0.3621859848499298, 0.03575330600142479, -0.036329738795757294, 0.12491820007562637, 0.09792976081371307, -0.13866207003593445, 0.0180052537471056, 0.0735854059457779, 0.01421810220927, 
0.02926146611571312, -0.06267356127500534, 0.08419906347990036, 0.10988886654376984, -0.08575595915317535, -0.08599858731031418, 0.07634568214416504, -0.0765124186873436, 0.03054303489625454, 0.042650505900382996, 0.09068919718265533, -0.019194219261407852, -0.005869803950190544, -0.05863798037171364, -0.008118963800370693, 0.0032219740096479654, -0.038911573588848114, 0.05585042014718056, 0.16286811232566833, -0.10002832114696503, -0.031636450439691544, -0.10514556616544724, -0.11553292721509933, 0.024746352806687355, 0.12658239901065826, 0.01470422837883234, -0.03189133107662201, 0.02753334864974022, -0.03145582973957062, -0.03244807571172714, 0.01176893338561058, -0.00535329757258296, -0.042168568819761276, -0.0401034876704216, -0.08709392696619034, 0.07102344930171967, -0.046324994415044785, 0.007948965765535831, -0.021644476801156998, -0.10699065029621124, 0.1067192330956459, 0.18567614257335663, 0.06715426594018936, 0.03387505188584328, 0.09399396181106567, -0.05827052891254425, -0.03595997393131256, -0.0002521745045669377, 0.049282465130090714, -0.034749794751405716, -0.007426782511174679, 0.24185802042484283, 0.09654555469751358, 0.02819630689918995, 0.1497591882944107, -0.40158113837242126, -0.011272985488176346, -0.07142294198274612, -0.005280417390167713, -0.0035079254303127527, -0.06785280257463455, -0.07234115898609161, -0.1673487275838852, -0.002423311583697796, 0.061270345002412796, -0.051421117037534714, -0.013185957446694374, -0.03983590751886368, -0.01423357892781496, -0.034505099058151245, 0.0032625680323690176, 0.1353432834148407, -0.285751610994339, 0.174906924366951, 0.1875985860824585, 0.11201237887144089, 0.11409952491521835, 0.002653884468600154, -0.046706173568964005, 0.06800530850887299, 0.01188255287706852, -0.06995309144258499, 0.1283126026391983, -0.10057979077100754, 0.008532635867595673, 0.0369107648730278, -0.04894702881574631, 0.043615829199552536, -0.17779900133609772, -0.09998272359371185, -0.017420409247279167, 
-0.05752508342266083, -0.0992598682641983, -0.004284019116312265, 0.007652596104890108, 0.04536925628781319, -0.026020027697086334, -0.03491887077689171, 0.031214648857712746, -0.0845504030585289, -0.011398892849683762, 0.06527946144342422, -0.024214912205934525, -0.2136669158935547, -0.11208467185497284, -0.05256415903568268, 0.0096815787255764, 0.12551595270633698, 0.004834413528442383, -0.057030320167541504, -0.01689271256327629, 0.09451363980770111, 0.08142930269241333, -0.035637255758047104, -0.06022381782531738, -0.029541168361902237, 0.09558916091918945, -0.07073137909173965, -0.05561547353863716, 0.017496204003691673, -0.024834303185343742, 0.03467439487576485, 0.2096467912197113, -0.2407185435295105, 0.17273591458797455, 0.023207660764455795, 0.008825567550957203, 0.06182442978024483, 0.026938538998365402, 0.16589415073394775, -0.10446502268314362, 0.02125895768404007, 0.0045367879793047905, -0.06924224644899368, 0.08875319361686707, 0.104749396443367, 0.08683063089847565, -0.11950180679559708, -0.02919410914182663, 0.07325790822505951, -0.06005895510315895, -0.08583907783031464, -0.08520720154047012, -0.0707891508936882, -0.028755471110343933, 0.10326845943927765, 0.031767431646585464, 0.04808977618813515, 0.11917193233966827, -0.0103196045383811, 0.0826617106795311, 0.07145649194717407, 0.024064937606453896, 0.006359959486871958, 0.035303205251693726, 0.021865423768758774, -0.005107454024255276, -0.11673977971076965, 0.05613540858030319, 0.13263310492038727, 0.24409432709217072, 0.13111676275730133, 0.04275982081890106, 0.023839034140110016, -0.007235548458993435, 0.11665472388267517, 0.08797105401754379, -0.005062856711447239, 0.03971695899963379, -0.03655282035470009, -0.06849905848503113, 0.14113162457942963, 0.08474736660718918, 0.02826840803027153, -0.21750091016292572, 0.08356878161430359, -0.20886293053627014, 0.0361081138253212, 0.2081383317708969, 0.11560514569282532, -0.23422393202781677, 0.14919427037239075, 0.11739256978034973, 
0.057581428438425064, -0.02417653799057007, 0.0787171795964241, 0.07469723373651505, -0.00261155073530972, 0.11183515936136246, -0.0009071796084754169, 0.19092611968517303, -0.029854411259293556, -0.034476388245821, -0.06201208382844925, -0.3484317660331726, -0.005401357542723417, 0.06675276160240173, 0.03629855439066887, 0.2881494462490082, 0.06936955451965332, -0.13066716492176056, -0.02021670714020729, -0.11471135914325714, 0.04264969006180763, 0.0844193547964096, 0.16669030487537384, 0.049430619925260544, 0.07971692830324173, -0.14336569607257843, -0.1714903861284256, -0.018506310880184174, 0.0820598378777504, -0.14078310132026672, -0.0011194769758731127, 0.06165872886776924, -0.0807444378733635, -0.05072760954499245, 0.15892377495765686, -0.06549030542373657, -0.009758909232914448, -0.08268067985773087, -0.005797601770609617, 0.0076057519763708115, 0.05825776979327202, 0.05638117715716362, -0.001945726340636611, -0.0660853311419487, 0.08424301445484161, -0.047924306243658066, -0.05760600417852402, 0.0316932275891304, 0.03246302530169487, 0.026020076125860214, 0.004443328827619553, -0.04406445845961571, -0.017746267840266228, -0.03610380366444588, -0.18512551486492157, 0.1177893653512001, -0.05863190442323685, 0.05882846564054489, -0.15539231896400452, 0.07706163823604584, 0.01687825284898281, 0.03875502943992615, 0.03211955726146698, 0.13396824896335602, -0.0552469938993454, 0.03037312999367714, 0.12962788343429565, -0.09833377599716187, 0.025253254920244217, 0.17861081659793854, -0.1161382794380188, 0.01907304674386978, -0.0233283843845129, 0.00623791478574276, 0.11956470459699631, 0.12483915686607361, -0.06489116698503494, 0.15486758947372437, 0.10327015817165375, -0.06428956985473633, -0.3359411060810089, 0.032413966953754425, -0.06530357897281647, 0.025923503562808037, 0.05300353467464447, -0.2227848619222641, 0.11765473335981369, 0.07141793519258499, -0.04711151868104935, 0.21418021619319916, -0.3294611871242523, 0.0026071488391608, -0.05203305929899216, 
-0.03849814087152481, 0.1795167624950409, -0.185812309384346, -0.05199052393436432, -0.0709008201956749, -0.2541308104991913, 0.15870743989944458, -0.02492670714855194, 0.03339109569787979, 0.02546633966267109, -0.0345388762652874, 0.03373842313885689, 0.013378677889704704, 0.10410194844007492, 0.08756996691226959, 0.13313189148902893, -0.05375748500227928, -0.03504740074276924, 0.20259489119052887, 0.019758163020014763, -0.09495110809803009, 0.13274207711219788, 0.006576609797775745, -0.11282975226640701, -0.009408928453922272, -0.0979856476187706, 0.03190985694527626, 0.07071255147457123, -0.02379666082561016, -0.051786843687295914, -0.04108676314353943, -0.022870363667607307, 0.10630763322114944, 0.2897895872592926, 0.01216194313019514, 0.05364849790930748, -0.0974150225520134, -0.08169049024581909, 0.03435829281806946, -0.06618764251470566, -0.1578446924686432, -0.05584391951560974, 0.16180074214935303, -0.13104987144470215, 0.11835943162441254, 0.07014686614274979, 0.03316617012023926, 0.019377218559384346, 0.07705085724592209, 0.03047838993370533, 0.05749534070491791, 0.17525678873062134, -0.11043564230203629, 0.047078847885131836, -0.06905271857976913, -0.19245204329490662, 0.2606416642665863, -0.13878872990608215, 0.08210544288158417, 0.10559460520744324, -0.04199234023690224, 0.006046540569514036, 0.016254253685474396, -0.02300356514751911, -0.030909277498722076, 0.018476910889148712, -0.01407052855938673, -0.04955141991376877, 0.18945015966892242, 0.15971305966377258, -0.04674298316240311, -0.125084787607193, 0.12313548475503922, -0.04365554079413414, -0.07000310719013214, -0.03115103580057621, 0.18929050862789154, -0.21755929291248322, 0.02899773046374321, -0.052965179085731506, 0.07199686765670776, 0.006354055367410183, 0.227148175239563, 0.06983494758605957, 0.11734268814325333, 0.17616209387779236, 0.025756869465112686, 0.1205306127667427, -0.005597204901278019, -0.007363433483988047, 0.05571558699011803, -0.11124715209007263, -0.20025278627872467, 
-0.041619181632995605, 0.11553046107292175, -0.09542468190193176, -0.056577716022729874, -0.1355154812335968, 0.10786357522010803, -0.18250370025634766, 0.09849376976490021, 0.021478142589330673, -0.06092429161071777, -0.05675046890974045, -0.03263349086046219, -0.06441950052976608, -0.044758763164281845, -0.15376950800418854, -0.013256742618978024, -0.059534620493650436, 0.02074882946908474, -0.039403922855854034, -0.1261078119277954, 0.11669240146875381, -0.03832133486866951, 0.08050848543643951, 0.10476568341255188, -0.038136161863803864, 0.01354480441659689, -0.3055965304374695, -0.2265375703573227, 0.0967320129275322, 0.06262119859457016, -0.026445792987942696, 0.23768125474452972, 0.02601497806608677, -0.008550163358449936, 0.03595878183841705, 0.012773599475622177, -0.05854857340455055, -0.12581077218055725, -0.09371877461671829, -0.1289634257555008, -0.18892157077789307, -0.031656526029109955, -0.0432630330324173, 0.12174933403730392, 0.06996383517980576, 0.02135412022471428, 0.0027769936714321375, 0.10135550051927567, -0.06308560818433762, 0.028179775923490524, 0.007299820892512798, -0.0768006443977356, -0.09671060740947723, -0.060627397149801254, -0.00899801217019558, -0.11538322269916534, 0.27767810225486755, 0.12909381091594696, -0.07355699688196182, 0.019952211529016495, 0.17403733730316162, 0.032413505017757416, 0.07020141184329987, 0.2178676426410675, 0.11267081648111343, -0.02763623185455799, -0.08526695519685745, 0.06730542331933975, 0.05668783187866211, -0.002197236754000187, -0.014502763748168945, 0.14007042348384857, 0.04740256443619728, -0.031277626752853394, 0.12124757468700409, 0.008612117730081081, -0.022492598742246628, -0.06217411905527115, 0.0485108457505703, 0.04515831544995308, -0.022967886179685593, 0.045064933598041534, 0.08994359523057938, -0.03615632280707359, 0.09953217953443527, -0.017028018832206726, 0.018144110217690468, -0.030683178454637527, -0.10999825596809387, -0.09252433478832245, -0.14683997631072998, 0.06190238893032074, 
0.005104964133352041, 0.08512008935213089, 0.16879163682460785, 0.031725987792015076, 0.03727928176522255, 0.042824435979127884, -0.020099608227610588, -0.061260100454092026, 0.032391514629125595, -0.03509553149342537, -0.0037988824769854546, 0.10542608052492142, -0.0743820071220398, 0.08609051257371902, 0.04934307560324669, -0.08297441899776459, 0.015502049587666988, 0.05305061489343643, -0.07730959355831146, -0.14513593912124634, -0.11293920129537582, -0.05466334521770477, -0.059394922107458115, -0.10176172107458115, -0.039651814848184586, 0.07395634055137634, 0.11581866443157196, 0.008669302798807621, 0.19436269998550415, 0.0122571662068367, 0.05174078047275543, 0.006814655847847462, -0.011749127879738808, -0.11469493806362152, 0.11072520911693573, -0.04243430122733116, -0.14955569803714752, -0.010907969437539577, 0.17350752651691437, 0.1858818680047989, -0.12346545606851578, -0.024609800428152084, 0.008572828955948353, 0.025461122393608093, 0.07504663616418839, 0.026589341461658478, -0.04793139919638634, 0.18943391740322113, -0.07428370416164398, -0.13322918117046356, -0.04738802835345268, -0.03820530325174332, -0.1386164128780365, -0.11108025163412094, 0.057263318449258804, -0.024174517020583153, -0.13670054078102112, 0.21761763095855713, -0.10904156416654587, 0.1886311173439026, 0.15862412750720978, -0.21311582624912262, -0.09227629005908966, -0.05330697447061539, -0.028126701712608337, 0.010534278117120266, 0.05136322230100632, -0.10274777561426163, -0.08577515184879303, -0.13213281333446503, 0.05068357288837433, -0.28243401646614075, -0.178771510720253, -0.02030429057776928, -0.14959684014320374, 0.13455845415592194, -0.05294450372457504, 0.0692838802933693, -0.024384362623095512, -0.0742279440164566, 0.04435540363192558, -0.0837484747171402, 0.014556046575307846, -0.005416754633188248, -0.10416874289512634, 0.18930119276046753, -0.09807939827442169, 0.014315313659608364, 0.06988383829593658, -0.026650913059711456, -0.021108174696564674, 
0.13169756531715393, -0.03807080164551735, -0.04303476959466934, -0.00878167524933815, -0.10515912622213364, 0.009223765693604946, 0.04420777037739754, 0.009316862560808659, 0.020074356347322464, -0.016573909670114517, -0.0028004110790789127, 0.11417048424482346, -0.11923829466104507, -0.10372426360845566, -0.008409250527620316, -0.12466785311698914, -0.0037379758432507515, 0.058916009962558746, -0.017225027084350586, 0.025831427425146103, -0.11470548808574677, 0.10188688337802887, -0.027597015723586082, 0.09921536594629288, 0.14465391635894775, 0.005810844711959362, -0.0023028135765343904, -0.32281914353370667, 0.10326753556728363, -0.02274337038397789, -0.002341090003028512, -0.15038201212882996 ]
9985bb28619d31d4be6c69d03ebec5561e76a445
# Dataset Card for Wikipedia Brezhoneg January 2024 Dataset created using this [repo](https://huggingface.co/datasets/olm/wikipedia) with a January 2024 Wikipedia snapshot.
Yannael/wikipedia-brezhoneg
[ "annotations_creators:no-annotation", "language_creators:found", "multilinguality:monolingual", "size_categories:10K<n<100K", "language:br", "license:cc-by-sa-3.0", "license:gfdl", "pretraining", "language modelling", "wikipedia", "web", "region:us" ]
2024-01-25T14:38:44+00:00
{"annotations_creators": ["no-annotation"], "language_creators": ["found"], "language": ["br"], "license": ["cc-by-sa-3.0", "gfdl"], "multilinguality": ["monolingual"], "size_categories": ["10K<n<100K"], "source_datasets": [], "task_ids": [], "pretty_name": "Wikipedia Brezhoneg January 2024", "tags": ["pretraining", "language modelling", "wikipedia", "web"]}
2024-01-25T14:42:00+00:00
[]
[ "br" ]
TAGS #annotations_creators-no-annotation #language_creators-found #multilinguality-monolingual #size_categories-10K<n<100K #language-Breton #license-cc-by-sa-3.0 #license-gfdl #pretraining #language modelling #wikipedia #web #region-us
# Dataset Card for Wikipedia Brezhoneg January 2024 Dataset created using this repo with a January 2024 Wikipedia snapshot.
[ "# Dataset Card for Wikipedia Brezhoneg January 2024\n\nDataset created using this repo with a January 2024 Wikipedia snapshot." ]
[ "TAGS\n#annotations_creators-no-annotation #language_creators-found #multilinguality-monolingual #size_categories-10K<n<100K #language-Breton #license-cc-by-sa-3.0 #license-gfdl #pretraining #language modelling #wikipedia #web #region-us \n", "# Dataset Card for Wikipedia Brezhoneg January 2024\n\nDataset created using this repo with a January 2024 Wikipedia snapshot." ]
[ 82, 27 ]
[ "passage: TAGS\n#annotations_creators-no-annotation #language_creators-found #multilinguality-monolingual #size_categories-10K<n<100K #language-Breton #license-cc-by-sa-3.0 #license-gfdl #pretraining #language modelling #wikipedia #web #region-us \n# Dataset Card for Wikipedia Brezhoneg January 2024\n\nDataset created using this repo with a January 2024 Wikipedia snapshot." ]
[ -0.05595742166042328, 0.046600017696619034, -0.004742831457406282, 0.1328580379486084, 0.04537833109498024, 0.06164335459470749, 0.19215905666351318, 0.0794253796339035, 0.09313659369945526, -0.046685121953487396, 0.11506470292806625, 0.02497013285756111, 0.04549705982208252, 0.06730794906616211, -0.06617441028356552, -0.22818967700004578, 0.020819999277591705, -0.011311325244605541, -0.0717834085226059, 0.071802519261837, 0.1204085424542427, 0.011460223235189915, 0.045655421912670135, -0.010622058063745499, -0.18518061935901642, 0.034081026911735535, -0.0033701262436807156, -0.09049256891012192, 0.063962422311306, 0.07456611841917038, 0.007503293454647064, 0.065839983522892, 0.011065192520618439, -0.1385211944580078, 0.013855138793587685, -0.026575110852718353, -0.014210550114512444, -0.0023795876186341047, -0.04312276467680931, -0.06326132267713547, 0.07151079922914505, -0.007994196377694607, -0.07676392048597336, 0.08459104597568512, -0.11842197924852371, -0.1118704080581665, -0.048820968717336655, 0.006999315228313208, -0.1299067735671997, -0.03395476192235947, -0.026397518813610077, 0.05247388407588005, -0.012657945044338703, 0.02077270857989788, -0.00909818522632122, -0.16544944047927856, -0.048687294125556946, 0.1749298870563507, 0.005768622271716595, 0.02116047777235508, -0.10683633387088776, 0.11505217105150223, 0.0856044590473175, 0.014985481277108192, -0.06511947512626648, -0.09730169177055359, -0.05188419297337532, -0.05990717187523842, 0.01191270537674427, 0.042042750865221024, 0.318572998046875, 0.06900627911090851, -0.03921541944146156, -0.0539027601480484, 0.014923411421477795, 0.09706273674964905, -0.046434368938207626, 0.029695406556129456, -0.024589108303189278, 0.000416966708144173, 0.05844864621758461, -0.14962436258792877, -0.07975659519433975, 0.026440724730491638, -0.11066168546676636, 0.22782061994075775, 0.05278030037879944, 0.09121699631214142, -0.029020043089985847, -0.06930264830589294, -0.13290873169898987, -0.08111201971769333, 
-0.020492644980549812, -0.08300668746232986, 0.027777867391705513, 0.06649447232484818, 0.02030852809548378, 0.0728989690542221, 0.1686328649520874, 0.014608493074774742, -0.07762615382671356, 0.007419937290251255, -0.13198359310626984, 0.10530854761600494, 0.1268799751996994, -0.11194796860218048, -0.06419000029563904, -0.047673095017671585, 0.0985194742679596, -0.018243122845888138, 0.0250812117010355, -0.0456923246383667, -0.10491746664047241, -0.056653425097465515, -0.07261588424444199, 0.07140317559242249, 0.026824362576007843, 0.05558773875236511, 0.0068352664820849895, -0.0049248551949858665, -0.020174607634544373, -0.07126305997371674, 0.033571723848581314, 0.03180863708257675, -0.07898005098104477, 0.1134616881608963, -0.020637528970837593, -0.030986525118350983, -0.10256784409284592, -0.06730537861585617, 0.01031720731407404, 0.043376389890909195, -0.007868767715990543, -0.10391110181808472, 0.035784997045993805, -0.03319893404841423, 0.03334629908204079, -0.07858789712190628, -0.10611645877361298, -0.028706230223178864, 0.07283592224121094, -0.07171490043401718, -0.02932288497686386, -0.07603388279676437, -0.003181318286806345, 0.012437883764505386, -0.03838576003909111, -0.05155659466981888, -0.0580715611577034, 0.06193052977323532, -0.0354737713932991, 0.0779973492026329, -0.32713574171066284, 0.0278075709939003, -0.06688378751277924, 0.021054372191429138, 0.029570743441581726, 0.11025901138782501, -0.07624301314353943, -0.02286800928413868, -0.07037219405174255, -0.03314780816435814, -0.061960238963365555, 0.06505000591278076, -0.009440317749977112, 0.1409343183040619, -0.15504078567028046, 0.006405164021998644, 0.14287085831165314, -0.037599481642246246, -0.10192875564098358, 0.13537409901618958, -0.03691834583878517, 0.1755872368812561, 0.08123853802680969, 0.2408912628889084, 0.03308875113725662, 0.07820811122655869, 0.04804159700870514, 0.01296885684132576, 0.027471942827105522, -0.03685791790485382, 0.003924825228750706, -0.02486531436443329, 
-0.036069825291633606, 0.06314606219530106, 0.09919042140245438, -0.007567598018795252, -0.01881367340683937, -0.058331526815891266, -0.04334104433655739, -0.09065257757902145, 0.0923251211643219, 0.0009149518446065485, 0.08523248136043549, -0.11295026540756226, -0.029039904475212097, 0.07281726598739624, 0.027365900576114655, -0.08957623690366745, 0.03629467263817787, -0.061822205781936646, 0.030793294310569763, 0.037577468901872635, 0.046165622770786285, -0.08250174671411514, -0.008068665862083435, 0.03073517605662346, 0.07775849848985672, 0.17855094373226166, 0.10616205632686615, -0.015159949660301208, -0.024655558168888092, -0.07665697485208511, 0.13363370299339294, 0.06451106071472168, -0.006256571039557457, 0.02262692339718342, -0.12819871306419373, 0.0845886841416359, -0.003253885777667165, -0.0166886318475008, -0.08496353030204773, 0.017435245215892792, -0.022804465144872665, 0.038881056010723114, -0.08456899970769882, 0.0595741830766201, -0.000822864065412432, 0.05590984225273132, -0.09861649572849274, 0.005246835295110941, 0.07545540481805801, -0.06174544617533684, -0.060800522565841675, 0.11585748195648193, 0.04453897476196289, -0.03561048209667206, 0.19049948453903198, -0.08408865332603455, 0.028675848618149757, -0.04225081577897072, 0.03647977113723755, 0.006994535215198994, 0.01339303981512785, -0.011163444258272648, 0.02975596860051155, -0.002514664549380541, 0.09297776222229004, -0.07778925448656082, 0.012873421423137188, 0.01607574336230755, -0.032034676522016525, -0.07268068194389343, 0.11430863291025162, 0.11402041465044022, -0.16370683908462524, 0.16701647639274597, 0.07196914404630661, 0.009321387857198715, 0.16929006576538086, -0.004544754046946764, 0.04383297637104988, 0.013192269019782543, 0.016436271369457245, -0.03965163603425026, 0.1300710290670395, -0.1971590518951416, -0.015611262060701847, 0.037712015211582184, 0.016767947003245354, 0.023574871942400932, -0.08355013281106949, -0.037885881960392, -0.06830336153507233, 
-0.05771973729133606, -0.06118074432015419, 0.020688245072960854, 0.0004438894393388182, 0.0866607055068016, -0.05945076048374176, -0.024046340957283974, 0.06100158765912056, -0.007994107902050018, -0.015622592531144619, 0.11260145157575607, -0.09897381067276001, -0.21197664737701416, -0.057696159929037094, -0.1807045191526413, -0.19186271727085114, -0.00838230736553669, 0.04555259272456169, 0.01289612427353859, 0.015826504677534103, -0.0513848252594471, 0.10504597425460815, -0.18296580016613007, -0.10063674300909042, 0.011815345846116543, 0.024208862334489822, -0.07665646076202393, -0.10969649255275726, 0.007391033228486776, 0.01020124088972807, 0.01891196146607399, 0.09569253027439117, -0.11011036485433578, 0.015346406027674675, 0.07424234598875046, -0.08262447267770767, 0.00754969147965312, -0.058803312480449677, 0.18439148366451263, -0.02743474580347538, 0.03705903887748718, 0.1187005341053009, 0.012202387675642967, -0.0023159042466431856, 0.1819031983613968, 0.080954909324646, -0.10069333016872406, -0.01314191147685051, 0.024610674008727074, -0.0777617022395134, -0.1865670531988144, -0.06397608667612076, -0.06567519903182983, 0.05705327168107033, 0.05334828794002533, 0.028002705425024033, 0.000587081944104284, 0.09881120920181274, -0.01438833586871624, 0.07248643040657043, 0.042950283735990524, 0.011722445487976074, 0.2644709050655365, -0.027347752824425697, 0.07355843484401703, -0.035002611577510834, -0.005548293236643076, 0.08577793836593628, 0.18221189081668854, 0.14980469644069672, 0.07196242362260818, 0.13557679951190948, 0.06283216178417206, 0.06360741704702377, 0.08481339365243912, 0.05025552213191986, 0.031363535672426224, 0.08095575124025345, -0.012195339426398277, -0.018602261319756508, 0.008901956491172314, 0.012622176669538021, 0.07684414833784103, -0.0800137147307396, 0.045510515570640564, -0.10304709523916245, 0.06374036520719528, 0.07663870602846146, 0.12243988364934921, -0.030963731929659843, 0.009738673456013203, 0.07099530845880508, 
-0.0072133783251047134, -0.1823522448539734, 0.10884802043437958, 0.03956601023674011, -0.02611697092652321, 0.14260727167129517, 0.023724671453237534, 0.09591395407915115, -0.06604539602994919, -0.008989373221993446, -0.09645266085863113, -0.04995549097657204, 0.002399950986728072, 0.0955427959561348, -0.2759529650211334, 0.18366487324237823, 0.0058738901279866695, 0.004078117664903402, -0.0835607647895813, -0.031456589698791504, 0.002419956959784031, 0.06184381991624832, 0.12961453199386597, 0.014296208508312702, 0.029972337186336517, 0.01813601143658161, -0.08262444287538528, -0.0035484021063894033, 0.05131089314818382, 0.0321386381983757, -0.03428177535533905, 0.0347098708152771, 0.05187838152050972, -0.02963237278163433, -0.10999540984630585, -0.24390515685081482, -0.1865079253911972, 0.0272038746625185, 0.1192406415939331, -0.07576391100883484, 0.015331990085542202, -0.05626196414232254, -0.029132995754480362, 0.19816263020038605, 0.046455420553684235, -0.12420503795146942, -0.10456313192844391, 0.13098417222499847, 0.1344468742609024, 0.024159803986549377, 0.004403351806104183, -0.06222181394696236, -0.024438541382551193, -0.1343986690044403, -0.08247188478708267, 0.04367474839091301, -0.00698866369202733, -0.00489531084895134, -0.03350149095058441, 0.14911532402038574, 0.02473394386470318, 0.0018526202766224742, 0.04806104302406311, -0.017784766852855682, 0.020621413365006447, -0.11823716014623642, 0.03127704933285713, -0.13342274725437164, -0.07468625158071518, 0.07039601355791092, -0.05189376324415207, -0.2310233861207962, -0.06730787456035614, -0.11923268437385559, 0.19076402485370636, 0.19972805678844452, -0.03814992681145668, 0.06267089396715164, 0.2212374061346054, -0.07609947770833969, -0.24698393046855927, -0.03341414034366608, -0.07793106138706207, 0.047297392040491104, -0.13031886518001556, -0.16802650690078735, 0.05303243175148964, 0.07775778323411942, 0.00824494007974863, 0.22030113637447357, -0.20972442626953125, -0.11597272753715515, 
0.1272737830877304, 0.0676180049777031, 0.22956006228923798, -0.15617331862449646, -0.078391894698143, -0.17559753358364105, -0.028735240921378136, 0.2434484213590622, 0.04502362385392189, 0.07161661982536316, -0.05366314575076103, 0.1027197614312172, -0.020161869004368782, 0.05841483548283577, 0.16347533464431763, 0.020544132217764854, 0.028051039204001427, -0.015587402507662773, -0.07354271411895752, 0.17649854719638824, 0.043177373707294464, 0.041233502328395844, -0.04917237162590027, -0.05796254426240921, 0.016627280041575432, -0.0688033327460289, -0.09646058082580566, 0.09315237402915955, -0.06106605753302574, -0.06476485729217529, -0.11235824227333069, 0.026323799043893814, -0.06192449852824211, 0.022807789966464043, 0.08822537958621979, -0.06455060094594955, -0.016466369852423668, 0.08736003935337067, 0.13546940684318542, 0.017251241952180862, 0.07344359904527664, -0.007145542651414871, -0.06237960606813431, 0.029772497713565826, -0.19938543438911438, -0.042405299842357635, 0.07791811227798462, 0.010795433074235916, 0.053711507469415665, 0.02254263311624527, -0.06734815239906311, 0.06465248763561249, 0.06473276019096375, 0.0069213672541081905, -0.17749720811843872, 0.027243321761488914, 0.011293585412204266, 0.081022709608078, -0.0025968991685658693, 0.15382245182991028, -0.04599975049495697, -0.044531919062137604, -0.02435094118118286, 0.0066847833804786205, -0.031356632709503174, 0.04583670571446419, 0.07638487964868546, 0.01314065232872963, -0.10569873452186584, 0.10498610138893127, 0.10969361662864685, 0.047450028359889984, 0.0023878500796854496, 0.12706522643566132, -0.04810485988855362, -0.09295220673084259, -0.11348720639944077, -0.0657796710729599, -0.06710013747215271, -0.09337848424911499, 0.09714770317077637, -0.15446051955223083, -0.048424527049064636, 0.10876554995775223, 0.0000842279550852254, 0.005955478176474571, -0.01407929789274931, -0.10446429997682571, -0.004895967897027731, 0.03238827362656593, -0.08067192882299423, -0.01275687012821436, 
0.01287757232785225, -0.10414665937423706, 0.02357296273112297, 0.08988026529550552, -0.025580117478966713, -0.021498994901776314, -0.09297187626361847, 0.014168787747621536, -0.14843592047691345, 0.0805729329586029, -0.10435646027326584, 0.016886955127120018, 0.024158451706171036, -0.09184543788433075, -0.045946430414915085, -0.023867739364504814, -0.11747090518474579, -0.012696006335318089, 0.024383198469877243, 0.058723002672195435, -0.04349636659026146, -0.027014197781682014, 0.05232471972703934, -0.01610022969543934, 0.09935098886489868, 0.03459562361240387, -0.1308625042438507, 0.10957351326942444, -0.2165246158838272, 0.006337316706776619, 0.016411758959293365, 0.05154796689748764, 0.03310031071305275, -0.030646933242678642, 0.00316391303204, 0.07058176398277283, 0.07913947105407715, 0.04774771258234978, 0.04892296344041824, -0.06319820880889893, -0.1242820993065834, -0.10462459921836853, -0.0645020455121994, 0.05876849591732025, 0.02919686771929264, 0.11157972365617752, 0.00659110676497221, 0.039653562009334564, 0.006363486405462027, 0.018018344417214394, -0.06093597412109375, 0.051643919199705124, -0.011777441017329693, -0.13265679776668549, 0.015242905355989933, -0.03758341446518898, 0.0375346876680851, 0.024838780984282494, 0.28549906611442566, 0.019506504759192467, -0.07760599255561829, -0.011139323934912682, -0.09677833318710327, -0.18826577067375183, 0.05864541977643967, 0.27429065108299255, 0.018599733710289, 0.004897506441920996, -0.025122161954641342, -0.020933642983436584, 0.02629881538450718, 0.07340677082538605, 0.04724382609128952, 0.10911330580711365, 0.028659768402576447, 0.027496518567204475, 0.03428985923528671, -0.042782776057720184, 0.16291159391403198, 0.13787022233009338, 0.07073095440864563, 0.07040013372898102, -0.09605785459280014, 0.060714107006788254, 0.02768576890230179, -0.031363505870103836, 0.05340730771422386, -0.03265777602791786, -0.12485321611166, -0.11738835275173187, -0.15661215782165527, -0.06277167797088623, 
-0.04373752698302269, 0.03401065245270729, -0.10778452455997467, 0.01257605105638504, 0.08294221013784409, 0.11393877118825912, -0.06312436610460281, -0.06672614812850952, -0.06278803199529648, -0.09234262257814407, 0.10822536051273346, -0.019037583842873573, 0.03810484707355499, -0.04895930737257004, -0.04416004940867424, -0.05282801762223244, 0.06419213861227036, 0.0016208647284656763, 0.07083310186862946, -0.007951756939291954, 0.029112333431839943, -0.13944007456302643, -0.034306950867176056, -0.03315683454275131, 0.022674353793263435, 0.09421627223491669, 0.15330734848976135, 0.05547940358519554, -0.048036616295576096, 0.02596152387559414, 0.08408880978822708, 0.08387799561023712, -0.13990212976932526, -0.03533164784312248, -0.028886806219816208, -0.04308304563164711, 0.021761201322078705, -0.040710631757974625, -0.08758298307657242, -0.029540592804551125, 0.20883187651634216, 0.3450506031513214, -0.05827808752655983, -0.024234216660261154, -0.010864869691431522, -0.0006627527764067054, 0.06976332515478134, 0.063470259308815, 0.027960602194070816, 0.02235615998506546, -0.008838184177875519, -0.06284746527671814, -0.027112537994980812, -0.01695205457508564, -0.12279341369867325, 0.1718735694885254, 0.01657961681485176, -0.04998520016670227, -0.05555570498108864, 0.10841059684753418, -0.10512098670005798, -0.058036066591739655, 0.04330078512430191, -0.10037825256586075, -0.1078898012638092, -0.02182742953300476, -0.13491658866405487, 0.036202140152454376, -0.019692223519086838, -0.06814903020858765, -0.015390204265713692, -0.04410485550761223, -0.021403398364782333, -0.19248925149440765, 0.02270696312189102, 0.047246020287275314, -0.04044764116406441, 0.10835864394903183, 0.00010225951700704172, 0.14841553568840027, 0.05973094329237938, -0.0015418147668242455, -0.016199631616473198, 0.07865792512893677, 0.012611920945346355, 0.21144092082977295, 0.06069118529558182, 0.10931137949228287, -0.0025977506302297115, 0.03257087245583534, 0.07642471045255661, 
0.04122448340058327, 0.01859404146671295, 0.1475592851638794, -0.10087841004133224, -0.06271235644817352, 0.04360291734337807, -0.17230728268623352, 0.06350766867399216, 0.08622036874294281, -0.04259591922163963, -0.07005330175161362, -0.056189652532339096, 0.02997724525630474, -0.0411154143512249, -0.02102128230035305, -0.0869709774851799, -0.12704265117645264, -0.06022237613797188, 0.006451140623539686, 0.03591834008693695, -0.2130802422761917, 0.07017222046852112, -0.08613786101341248, 0.07063435763120651, -0.044390689581632614, 0.10750607401132584, 0.06909915059804916, -0.013168534263968468, -0.044058315455913544, -0.12504203617572784, 0.011931633576750755, 0.025514790788292885, -0.02317323535680771, -0.06646479666233063 ]
fff346b99b8e870bd7de2c6bd6e1c146841a536f
# Dataset Card for Evaluation run of daxiongshu/Pluto_24B_DPO_63 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [daxiongshu/Pluto_24B_DPO_63](https://huggingface.co/daxiongshu/Pluto_24B_DPO_63) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_daxiongshu__Pluto_24B_DPO_63", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T14:41:24.929250](https://huggingface.co/datasets/open-llm-leaderboard/details_daxiongshu__Pluto_24B_DPO_63/blob/main/results_2024-01-25T14-41-24.929250.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6498497884015774, "acc_stderr": 0.03220698512335376, "acc_norm": 0.6505024665977697, "acc_norm_stderr": 0.03286228741096377, "mc1": 0.6682986536107711, "mc1_stderr": 0.01648214881024148, "mc2": 0.7936098015130929, "mc2_stderr": 0.013242598947000108 }, "harness|arc:challenge|25": { "acc": 0.7064846416382252, "acc_stderr": 0.013307250444941122, "acc_norm": 0.7397610921501706, "acc_norm_stderr": 0.012821930225112563 }, "harness|hellaswag|10": { "acc": 0.6981676956781517, "acc_stderr": 0.004581147247963204, "acc_norm": 0.8816968731328421, "acc_norm_stderr": 0.0032230665918060006 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6222222222222222, "acc_stderr": 0.04188307537595853, "acc_norm": 0.6222222222222222, "acc_norm_stderr": 0.04188307537595853 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7368421052631579, "acc_stderr": 0.03583496176361074, "acc_norm": 0.7368421052631579, "acc_norm_stderr": 0.03583496176361074 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6867924528301886, "acc_stderr": 0.028544793319055326, "acc_norm": 0.6867924528301886, "acc_norm_stderr": 0.028544793319055326 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 
0.050161355804659205 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6589595375722543, "acc_stderr": 0.036146654241808254, "acc_norm": 0.6589595375722543, "acc_norm_stderr": 0.036146654241808254 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.04810840148082636, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.04810840148082636 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5531914893617021, "acc_stderr": 0.032500536843658404, "acc_norm": 0.5531914893617021, "acc_norm_stderr": 0.032500536843658404 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5263157894736842, "acc_stderr": 0.046970851366478626, "acc_norm": 0.5263157894736842, "acc_norm_stderr": 0.046970851366478626 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5517241379310345, "acc_stderr": 0.04144311810878152, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.04144311810878152 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41005291005291006, "acc_stderr": 0.025331202438944427, "acc_norm": 0.41005291005291006, "acc_norm_stderr": 0.025331202438944427 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5079365079365079, "acc_stderr": 0.044715725362943486, "acc_norm": 0.5079365079365079, "acc_norm_stderr": 0.044715725362943486 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7645161290322581, "acc_stderr": 0.02413763242933771, "acc_norm": 0.7645161290322581, "acc_norm_stderr": 0.02413763242933771 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.49261083743842365, "acc_stderr": 0.03517603540361008, "acc_norm": 0.49261083743842365, "acc_norm_stderr": 0.03517603540361008 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7636363636363637, "acc_stderr": 0.03317505930009182, "acc_norm": 0.7636363636363637, "acc_norm_stderr": 0.03317505930009182 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.797979797979798, "acc_stderr": 0.028606204289229872, "acc_norm": 0.797979797979798, "acc_norm_stderr": 0.028606204289229872 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8860103626943006, "acc_stderr": 0.022935144053919432, "acc_norm": 0.8860103626943006, "acc_norm_stderr": 0.022935144053919432 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6666666666666666, "acc_stderr": 0.023901157979402534, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.023901157979402534 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.02784081149587193, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.02784081149587193 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6764705882352942, "acc_stderr": 0.03038835355188679, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.03038835355188679 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.36423841059602646, "acc_stderr": 0.03929111781242742, "acc_norm": 0.36423841059602646, "acc_norm_stderr": 0.03929111781242742 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8385321100917431, "acc_stderr": 0.015776239256163248, "acc_norm": 0.8385321100917431, "acc_norm_stderr": 0.015776239256163248 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5555555555555556, "acc_stderr": 
0.03388857118502325, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.03388857118502325 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8333333333333334, "acc_stderr": 0.026156867523931045, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.026156867523931045 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7974683544303798, "acc_stderr": 0.026160568246601446, "acc_norm": 0.7974683544303798, "acc_norm_stderr": 0.026160568246601446 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.031381476375754995, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.031381476375754995 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8015267175572519, "acc_stderr": 0.03498149385462471, "acc_norm": 0.8015267175572519, "acc_norm_stderr": 0.03498149385462471 }, "harness|hendrycksTest-international_law|5": { "acc": 0.768595041322314, "acc_stderr": 0.03849856098794088, "acc_norm": 0.768595041322314, "acc_norm_stderr": 0.03849856098794088 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7592592592592593, "acc_stderr": 0.04133119440243838, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.04133119440243838 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7668711656441718, "acc_stderr": 0.0332201579577674, "acc_norm": 0.7668711656441718, "acc_norm_stderr": 0.0332201579577674 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.44642857142857145, "acc_stderr": 0.04718471485219588, "acc_norm": 0.44642857142857145, "acc_norm_stderr": 0.04718471485219588 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.021901905115073325, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.021901905115073325 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 
0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8339719029374202, "acc_stderr": 0.013306478243066302, "acc_norm": 0.8339719029374202, "acc_norm_stderr": 0.013306478243066302 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6936416184971098, "acc_stderr": 0.024818350129436596, "acc_norm": 0.6936416184971098, "acc_norm_stderr": 0.024818350129436596 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4569832402234637, "acc_stderr": 0.01666049858050917, "acc_norm": 0.4569832402234637, "acc_norm_stderr": 0.01666049858050917 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7058823529411765, "acc_stderr": 0.02609016250427905, "acc_norm": 0.7058823529411765, "acc_norm_stderr": 0.02609016250427905 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7202572347266881, "acc_stderr": 0.02549425935069491, "acc_norm": 0.7202572347266881, "acc_norm_stderr": 0.02549425935069491 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.75, "acc_stderr": 0.02409347123262133, "acc_norm": 0.75, "acc_norm_stderr": 0.02409347123262133 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4716312056737589, "acc_stderr": 0.029779450957303062, "acc_norm": 0.4716312056737589, "acc_norm_stderr": 0.029779450957303062 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.47327249022164275, "acc_stderr": 0.012751977967676012, "acc_norm": 0.47327249022164275, "acc_norm_stderr": 0.012751977967676012 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6838235294117647, "acc_stderr": 0.028245687391462927, "acc_norm": 0.6838235294117647, "acc_norm_stderr": 0.028245687391462927 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6797385620915033, "acc_stderr": 0.018875682938069443, "acc_norm": 0.6797385620915033, "acc_norm_stderr": 0.018875682938069443 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, 
"acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7428571428571429, "acc_stderr": 0.02797982353874455, "acc_norm": 0.7428571428571429, "acc_norm_stderr": 0.02797982353874455 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454115, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454115 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.038823108508905954, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.038823108508905954 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8421052631578947, "acc_stderr": 0.02796678585916089, "acc_norm": 0.8421052631578947, "acc_norm_stderr": 0.02796678585916089 }, "harness|truthfulqa:mc|0": { "mc1": 0.6682986536107711, "mc1_stderr": 0.01648214881024148, "mc2": 0.7936098015130929, "mc2_stderr": 0.013242598947000108 }, "harness|winogrande|5": { "acc": 0.8168902920284136, "acc_stderr": 0.010869778633168367 }, "harness|gsm8k|5": { "acc": 0.6611068991660348, "acc_stderr": 0.013037955768562514 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_daxiongshu__Pluto_24B_DPO_63
[ "region:us" ]
2024-01-25T14:43:41+00:00
{"pretty_name": "Evaluation run of daxiongshu/Pluto_24B_DPO_63", "dataset_summary": "Dataset automatically created during the evaluation run of model [daxiongshu/Pluto_24B_DPO_63](https://huggingface.co/daxiongshu/Pluto_24B_DPO_63) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_daxiongshu__Pluto_24B_DPO_63\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T14:41:24.929250](https://huggingface.co/datasets/open-llm-leaderboard/details_daxiongshu__Pluto_24B_DPO_63/blob/main/results_2024-01-25T14-41-24.929250.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6498497884015774,\n \"acc_stderr\": 0.03220698512335376,\n \"acc_norm\": 0.6505024665977697,\n \"acc_norm_stderr\": 0.03286228741096377,\n \"mc1\": 0.6682986536107711,\n \"mc1_stderr\": 0.01648214881024148,\n \"mc2\": 0.7936098015130929,\n \"mc2_stderr\": 0.013242598947000108\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7064846416382252,\n \"acc_stderr\": 0.013307250444941122,\n \"acc_norm\": 0.7397610921501706,\n \"acc_norm_stderr\": 0.012821930225112563\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6981676956781517,\n \"acc_stderr\": 0.004581147247963204,\n \"acc_norm\": 0.8816968731328421,\n \"acc_norm_stderr\": 0.0032230665918060006\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6222222222222222,\n \"acc_stderr\": 0.04188307537595853,\n \"acc_norm\": 0.6222222222222222,\n \"acc_norm_stderr\": 0.04188307537595853\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7368421052631579,\n \"acc_stderr\": 0.03583496176361074,\n \"acc_norm\": 0.7368421052631579,\n \"acc_norm_stderr\": 0.03583496176361074\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6867924528301886,\n \"acc_stderr\": 0.028544793319055326,\n \"acc_norm\": 0.6867924528301886,\n \"acc_norm_stderr\": 0.028544793319055326\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n 
\"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6589595375722543,\n \"acc_stderr\": 0.036146654241808254,\n \"acc_norm\": 0.6589595375722543,\n \"acc_norm_stderr\": 0.036146654241808254\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.04810840148082636,\n \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.04810840148082636\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5531914893617021,\n \"acc_stderr\": 0.032500536843658404,\n \"acc_norm\": 0.5531914893617021,\n \"acc_norm_stderr\": 0.032500536843658404\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5263157894736842,\n \"acc_stderr\": 0.046970851366478626,\n \"acc_norm\": 0.5263157894736842,\n \"acc_norm_stderr\": 0.046970851366478626\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878152,\n \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878152\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41005291005291006,\n \"acc_stderr\": 0.025331202438944427,\n \"acc_norm\": 0.41005291005291006,\n \"acc_norm_stderr\": 0.025331202438944427\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5079365079365079,\n \"acc_stderr\": 
0.044715725362943486,\n \"acc_norm\": 0.5079365079365079,\n \"acc_norm_stderr\": 0.044715725362943486\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411019,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7645161290322581,\n \"acc_stderr\": 0.02413763242933771,\n \"acc_norm\": 0.7645161290322581,\n \"acc_norm_stderr\": 0.02413763242933771\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.49261083743842365,\n \"acc_stderr\": 0.03517603540361008,\n \"acc_norm\": 0.49261083743842365,\n \"acc_norm_stderr\": 0.03517603540361008\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7636363636363637,\n \"acc_stderr\": 0.03317505930009182,\n \"acc_norm\": 0.7636363636363637,\n \"acc_norm_stderr\": 0.03317505930009182\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.797979797979798,\n \"acc_stderr\": 0.028606204289229872,\n \"acc_norm\": 0.797979797979798,\n \"acc_norm_stderr\": 0.028606204289229872\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8860103626943006,\n \"acc_stderr\": 0.022935144053919432,\n \"acc_norm\": 0.8860103626943006,\n \"acc_norm_stderr\": 0.022935144053919432\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.023901157979402534,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.023901157979402534\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2962962962962963,\n \"acc_stderr\": 0.02784081149587193,\n \"acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.02784081149587193\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.03038835355188679,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.03038835355188679\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.36423841059602646,\n \"acc_stderr\": 0.03929111781242742,\n \"acc_norm\": 0.36423841059602646,\n \"acc_norm_stderr\": 0.03929111781242742\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8385321100917431,\n \"acc_stderr\": 0.015776239256163248,\n \"acc_norm\": 0.8385321100917431,\n \"acc_norm_stderr\": 0.015776239256163248\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.03388857118502325,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.03388857118502325\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.026156867523931045,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.026156867523931045\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7974683544303798,\n \"acc_stderr\": 0.026160568246601446,\n \"acc_norm\": 0.7974683544303798,\n \"acc_norm_stderr\": 0.026160568246601446\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n \"acc_stderr\": 0.031381476375754995,\n \"acc_norm\": 0.6771300448430493,\n \"acc_norm_stderr\": 0.031381476375754995\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8015267175572519,\n \"acc_stderr\": 0.03498149385462471,\n \"acc_norm\": 0.8015267175572519,\n \"acc_norm_stderr\": 0.03498149385462471\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.768595041322314,\n \"acc_stderr\": 0.03849856098794088,\n \"acc_norm\": 0.768595041322314,\n \"acc_norm_stderr\": 0.03849856098794088\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.04133119440243838,\n 
\"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.04133119440243838\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7668711656441718,\n \"acc_stderr\": 0.0332201579577674,\n \"acc_norm\": 0.7668711656441718,\n \"acc_norm_stderr\": 0.0332201579577674\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n \"acc_stderr\": 0.04718471485219588,\n \"acc_norm\": 0.44642857142857145,\n \"acc_norm_stderr\": 0.04718471485219588\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.021901905115073325,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.021901905115073325\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8339719029374202,\n \"acc_stderr\": 0.013306478243066302,\n \"acc_norm\": 0.8339719029374202,\n \"acc_norm_stderr\": 0.013306478243066302\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6936416184971098,\n \"acc_stderr\": 0.024818350129436596,\n \"acc_norm\": 0.6936416184971098,\n \"acc_norm_stderr\": 0.024818350129436596\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4569832402234637,\n \"acc_stderr\": 0.01666049858050917,\n \"acc_norm\": 0.4569832402234637,\n \"acc_norm_stderr\": 0.01666049858050917\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7058823529411765,\n \"acc_stderr\": 0.02609016250427905,\n \"acc_norm\": 0.7058823529411765,\n \"acc_norm_stderr\": 0.02609016250427905\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7202572347266881,\n \"acc_stderr\": 0.02549425935069491,\n 
\"acc_norm\": 0.7202572347266881,\n \"acc_norm_stderr\": 0.02549425935069491\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.02409347123262133,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.02409347123262133\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4716312056737589,\n \"acc_stderr\": 0.029779450957303062,\n \"acc_norm\": 0.4716312056737589,\n \"acc_norm_stderr\": 0.029779450957303062\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47327249022164275,\n \"acc_stderr\": 0.012751977967676012,\n \"acc_norm\": 0.47327249022164275,\n \"acc_norm_stderr\": 0.012751977967676012\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6838235294117647,\n \"acc_stderr\": 0.028245687391462927,\n \"acc_norm\": 0.6838235294117647,\n \"acc_norm_stderr\": 0.028245687391462927\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6797385620915033,\n \"acc_stderr\": 0.018875682938069443,\n \"acc_norm\": 0.6797385620915033,\n \"acc_norm_stderr\": 0.018875682938069443\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7428571428571429,\n \"acc_stderr\": 0.02797982353874455,\n \"acc_norm\": 0.7428571428571429,\n \"acc_norm_stderr\": 0.02797982353874455\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454115,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454115\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.038823108508905954,\n 
\"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 0.038823108508905954\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8421052631578947,\n \"acc_stderr\": 0.02796678585916089,\n \"acc_norm\": 0.8421052631578947,\n \"acc_norm_stderr\": 0.02796678585916089\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.6682986536107711,\n \"mc1_stderr\": 0.01648214881024148,\n \"mc2\": 0.7936098015130929,\n \"mc2_stderr\": 0.013242598947000108\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8168902920284136,\n \"acc_stderr\": 0.010869778633168367\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6611068991660348,\n \"acc_stderr\": 0.013037955768562514\n }\n}\n```", "repo_url": "https://huggingface.co/daxiongshu/Pluto_24B_DPO_63", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|arc:challenge|25_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|gsm8k|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hellaswag|10_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-41-24.929250.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-41-24.929250.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-41-24.929250.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-41-24.929250.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-41-24.929250.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-41-24.929250.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T14-41-24.929250.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T14-41-24.929250.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["**/details_harness|winogrande|5_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T14-41-24.929250.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T14_41_24.929250", "path": ["results_2024-01-25T14-41-24.929250.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T14-41-24.929250.parquet"]}]}]}
2024-01-25T14:44:05+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of daxiongshu/Pluto_24B_DPO_63 Dataset automatically created during the evaluation run of model daxiongshu/Pluto_24B_DPO_63 on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T14:41:24.929250(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of daxiongshu/Pluto_24B_DPO_63\n\n\n\nDataset automatically created during the evaluation run of model daxiongshu/Pluto_24B_DPO_63 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T14:41:24.929250(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of daxiongshu/Pluto_24B_DPO_63\n\n\n\nDataset automatically created during the evaluation run of model daxiongshu/Pluto_24B_DPO_63 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T14:41:24.929250(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 193, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of daxiongshu/Pluto_24B_DPO_63\n\n\n\nDataset automatically created during the evaluation run of model daxiongshu/Pluto_24B_DPO_63 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T14:41:24.929250(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
[ -0.05199433118104935, 0.23019084334373474, -0.004577447660267353, 0.03267563134431839, 0.10062901675701141, -0.013093236833810806, 0.016393451020121574, 0.11155631393194199, -0.017954517155885696, 0.177032470703125, -0.013211488723754883, 0.08854323625564575, 0.09343625605106354, 0.11988316476345062, 0.021863259375095367, -0.14426189661026, 0.01623869314789772, -0.06481431424617767, 0.05192930996417999, 0.07832604646682739, 0.09063281118869781, -0.07887423783540726, 0.05485266447067261, -0.057451214641332626, -0.015353880822658539, 0.012427207082509995, -0.09944772720336914, -0.03843504935503006, 0.0862470343708992, 0.08506996929645538, 0.04321084916591644, -0.012601494789123535, 0.021695025265216827, -0.2535356879234314, 0.012961718253791332, 0.094215527176857, -0.0006731122266501188, 0.03384491056203842, 0.11478142440319061, -0.06198939308524132, 0.031058434396982193, -0.09423364698886871, 0.06811758875846863, 0.03824020177125931, -0.11594098806381226, -0.13630568981170654, -0.13353002071380615, 0.03008975088596344, 0.058401770889759064, 0.06010734289884567, -0.027012784034013748, 0.14430493116378784, -0.024192236363887787, 0.04048343747854233, 0.1106657087802887, -0.08638214319944382, -0.022635526955127716, 0.05352788418531418, 0.03545353561639786, 0.07629930973052979, -0.0900077223777771, -0.011253342032432556, 0.035217124968767166, 0.04477308690547943, 0.013309921137988567, -0.0030899138655513525, -0.056298479437828064, 0.011836783960461617, -0.13007235527038574, -0.11071532964706421, 0.18833595514297485, 0.005049696657806635, -0.03499195724725723, -0.16985760629177094, -0.026857636868953705, -0.004998702555894852, -0.0035012904554605484, -0.06449028849601746, 0.019908437505364418, -0.019074808806180954, 0.08922107517719269, -0.02575288899242878, -0.0928354561328888, -0.013794953003525734, 0.007373761851340532, 0.04081055521965027, 0.018958471715450287, -0.02983981743454933, -0.008253579027950764, 0.11165772378444672, -0.0161636620759964, 
-0.09547534584999084, -0.07164862751960754, -0.048338547348976135, -0.10675132274627686, -0.04406218230724335, 0.01659560389816761, -0.07058972865343094, 0.032500408589839935, 0.2163432538509369, -0.04489649087190628, 0.038513198494911194, -0.10088799893856049, -0.0057739075273275375, 0.11886917054653168, 0.06278274208307266, -0.07259877026081085, -0.061378058046102524, -0.01592443697154522, 0.02364256978034973, 0.031341176480054855, -0.016605060547590256, 0.01200125366449356, 0.072355255484581, 0.05345328897237778, 0.11954869329929352, 0.12318575382232666, 0.022853927686810493, -0.06459666788578033, -0.023307673633098602, 0.22253566980361938, -0.14833584427833557, -0.013890077359974384, -0.002950079506263137, -0.035812970250844955, -0.09323199093341827, 0.07652878761291504, 0.007158758584409952, -0.05395161360502243, 0.11978226155042648, -0.04920099675655365, -0.08175015449523926, -0.06655743718147278, -0.04749145731329918, 0.06574457138776779, -0.027542496100068092, -0.022369027137756348, -0.07809560745954514, -0.11164146661758423, -0.07926693558692932, 0.013695934787392616, -0.06591595709323883, -0.03886435925960541, 0.039295025169849396, -0.013522569090127945, -0.025273893028497696, -0.0127248615026474, 0.10848665982484818, -0.04466795176267624, 0.032012343406677246, 0.022623516619205475, 0.0029174936935305595, 0.07382353395223618, 0.05252107232809067, -0.12358543276786804, 0.08582741767168045, -0.12476278841495514, 0.10916097462177277, -0.10793302953243256, -0.012981257401406765, -0.14020495116710663, -0.005451612174510956, -0.04210474714636803, 0.009394418448209763, 0.005486737005412579, 0.10708916187286377, -0.2291010320186615, 0.01981547474861145, 0.1292644590139389, -0.10181108117103577, -0.10036379098892212, 0.06963491439819336, -0.04221828281879425, 0.08571331202983856, 0.059471771121025085, 0.0993235856294632, 0.11598270386457443, -0.09099455177783966, -0.1174091100692749, -0.07099960744380951, -0.02341863512992859, 0.15648722648620605, 
0.06369730085134506, -0.06435905396938324, 0.12537942826747894, 0.0431017242372036, -0.02350115031003952, -0.09968146681785583, -0.010119138285517693, -0.06626211106777191, -0.02049371227622032, -0.04697205871343613, -0.08211031556129456, 0.012221727520227432, -0.0856068953871727, -0.01907484605908394, -0.09107580780982971, 0.028298210352659225, 0.08403988927602768, -0.018751230090856552, 0.009284084662795067, -0.04940938577055931, 0.0340365469455719, 0.0014152207877486944, 0.024197440594434738, -0.214034765958786, -0.11708468943834305, 0.04316386580467224, -0.13643944263458252, 0.05828968435525894, 0.03304928541183472, 0.0068227630108594894, 0.036572545766830444, -0.018761761486530304, 0.019941268488764763, 0.005512320436537266, 0.0016925918171182275, -0.006439882796257734, -0.13102000951766968, -0.03958263248205185, -0.08285406976938248, 0.1097068339586258, -0.13137534260749817, -0.01732444390654564, 0.07378444075584412, 0.16059499979019165, -0.007408588193356991, -0.07620950043201447, 0.07965497672557831, 0.0015839686384424567, -0.02493513748049736, -0.06719376146793365, 0.001187662361189723, -0.017163332551717758, 0.042463935911655426, 0.02249259501695633, -0.1949743628501892, -0.1412561982870102, 0.07824001461267471, 0.13476058840751648, -0.08606594800949097, -0.07188329100608826, -0.058461301028728485, -0.06907682120800018, -0.08418026566505432, -0.07338615506887436, 0.08788241446018219, 0.06936927139759064, 0.033339034765958786, -0.0751500129699707, -0.05777044594287872, 0.011480464600026608, 0.05169752240180969, -0.07617746293544769, 0.09415740519762039, 0.06290905922651291, -0.11210529506206512, 0.11796009540557861, -0.02002631314098835, 0.13075220584869385, 0.06901402771472931, 0.02249913476407528, -0.09269404411315918, -0.006979213561862707, 0.05723674222826958, 0.027129385620355606, 0.08247026056051254, -0.035141654312610626, 0.038947075605392456, 0.07447123527526855, -0.01582648605108261, 0.041373394429683685, -0.04720022529363632, 0.02877156250178814, 
0.0471552312374115, 0.0014134414959698915, 0.02661154232919216, 0.0013986080884933472, 0.0020285851787775755, 0.06175755709409714, 0.03739987313747406, 0.1151551604270935, -0.008014681749045849, -0.03046971932053566, -0.09442362189292908, 0.1280553936958313, -0.09859245270490646, -0.30550581216812134, -0.13487908244132996, -0.01843506656587124, -0.0351257361471653, -0.015431663952767849, 0.06524154543876648, -0.023752015084028244, -0.08777128905057907, -0.08617940545082092, 0.0372961089015007, -0.00703989714384079, -0.13589943945407867, -0.05941550433635712, 0.056173332035541534, 0.006831149570643902, -0.16498056054115295, 0.03972452133893967, 0.04917865991592407, -0.024919133633375168, -0.0003854651004076004, 0.08239498734474182, 0.1445101946592331, 0.06960061192512512, 0.05102057009935379, -0.029784340411424637, -0.0073790159076452255, 0.1718142032623291, -0.11431768536567688, 0.04055343195796013, 0.12303608655929565, -0.03860946372151375, 0.06996748596429825, 0.18084794282913208, 0.0033824369311332703, -0.0976422131061554, 0.04506755620241165, 0.09148608148097992, -0.06814730167388916, -0.2535763680934906, -0.09878723323345184, -0.021201901137828827, -0.008459264412522316, 0.09677466750144958, 0.06413653492927551, 0.014801128767430782, 0.027721602469682693, -0.11146733164787292, -0.013062539510428905, -0.06726208329200745, 0.0767604261636734, 0.06796029210090637, -0.006034155376255512, 0.044158853590488434, -0.041066523641347885, 0.020660938695073128, 0.11147986352443695, 0.05285138636827469, 0.14592649042606354, -0.03189297765493393, 0.1633290946483612, 0.07611173391342163, 0.09441307187080383, -0.0475391261279583, 0.03992395102977753, 0.020985189825296402, 0.07190743088722229, -0.013293365016579628, -0.10169191658496857, -0.05271586403250694, 0.09454600512981415, 0.015005875378847122, -0.07515230029821396, 0.030184075236320496, -0.042672328650951385, 0.02871335670351982, 0.1891392469406128, -0.0289950892329216, -0.16055181622505188, -0.05815120041370392, 
0.055500857532024384, -0.003193469252437353, -0.09796957671642303, -0.036309048533439636, 0.05132632330060005, -0.14942921698093414, 0.03338789567351341, -0.02053198218345642, 0.0813322514295578, -0.12730440497398376, -0.016866270452737808, -0.028602879494428635, 0.04730856046080589, 0.003035222878679633, 0.121467724442482, -0.14546862244606018, 0.10712753236293793, 0.007999269291758537, 0.025401338934898376, -0.10421659052371979, 0.055894020944833755, -0.052066028118133545, -0.043851256370544434, 0.1548708975315094, -0.013001669198274612, -0.07686720788478851, -0.048928093165159225, -0.11489246785640717, 0.002288761315867305, 0.08636573702096939, -0.1174180880188942, 0.09473173320293427, 0.039061039686203, -0.018199805170297623, -0.020103048533201218, -0.017804250121116638, -0.14344432950019836, -0.2251397669315338, 0.11550427228212357, -0.11190089583396912, 0.0645374208688736, -0.04198334366083145, -0.03208449110388756, -0.05061228573322296, 0.18193775415420532, -0.059782713651657104, -0.07030656188726425, -0.12361691147089005, 0.03487556427717209, 0.1780925989151001, -0.04764639586210251, 0.049827657639980316, -0.05653750151395798, 0.17503690719604492, -0.0065596988424658775, -0.0498276948928833, -0.0016014758730307221, -0.09146276861429214, -0.14813858270645142, -0.05004849657416344, 0.13263165950775146, 0.0655120387673378, 0.009803837165236473, 0.008660203777253628, 0.03846720606088638, 0.019343633204698563, -0.08692735433578491, 0.02989574335515499, 0.07964823395013809, 0.12653547525405884, 0.03811902552843094, -0.042673856019973755, -0.08636845648288727, -0.11257907748222351, -0.09163003414869308, 0.06390508264303207, 0.12038782238960266, -0.0582803338766098, 0.1485861986875534, 0.14766108989715576, -0.10280260443687439, -0.19207492470741272, -0.04680224880576134, 0.037967123091220856, -0.028734996914863586, 0.0905400812625885, -0.2058853656053543, 0.0714617371559143, 0.057897601276636124, -0.0062653785571455956, 0.0876474529504776, -0.22967615723609924, 
-0.12976686656475067, 0.03112073242664337, 0.013038165867328644, -0.2382293939590454, -0.18352995812892914, -0.11068578064441681, -0.031863510608673096, -0.16739031672477722, 0.14479747414588928, -0.013904553838074207, 0.02114338055253029, -0.004029855132102966, 0.059201136231422424, 0.05320969596505165, -0.05824483931064606, 0.13381218910217285, 0.009522662498056889, 0.009053039364516735, -0.10293357819318771, -0.015181954018771648, 0.033010970801115036, -0.04457945376634598, 0.11431625485420227, 0.05544561147689819, 0.059160251170396805, -0.08101023733615875, -0.04137677699327469, -0.05078604817390442, 0.05735619366168976, -0.07441941648721695, -0.06971688568592072, -0.06814815104007721, 0.08169776201248169, 0.08131128549575806, -0.022044964134693146, 0.005489732138812542, -0.029325447976589203, 0.04229119420051575, 0.21632209420204163, 0.10459919273853302, 0.05893871188163757, -0.13808143138885498, -0.015542462468147278, -0.012404613196849823, 0.0019453139975667, -0.13783100247383118, 0.03370821103453636, 0.09338244050741196, 0.05252380296587944, 0.07108688354492188, -0.03561363369226456, -0.19567787647247314, -0.007356011774390936, 0.06326234340667725, -0.10319371521472931, -0.21688640117645264, 0.025229137390851974, 0.11873002350330353, -0.138532817363739, -0.06423163414001465, 0.0887192040681839, 0.008539342321455479, -0.030104056000709534, 0.0016304545570164919, 0.06987413763999939, 0.04796641692519188, 0.10362139344215393, 0.00952901877462864, 0.051132600754499435, -0.07648870348930359, 0.12027618288993835, 0.1527693271636963, -0.14034685492515564, 0.03513437137007713, 0.0574159175157547, -0.05871699005365372, -0.06266587227582932, 0.015120905824005604, 0.007419053930789232, 0.020399045199155807, -0.048828091472387314, 0.02583293989300728, -0.017504151910543442, 0.0445818230509758, 0.1002364456653595, -0.00029655947582796216, 0.028546089306473732, 0.031335167586803436, -0.010067280381917953, -0.09890373051166534, 0.08897045254707336, 0.0413455069065094, 
0.044334061443805695, -0.044400405138731, 0.0117503572255373, 0.029324650764465332, 0.008165418170392513, 0.015056293457746506, -0.035226088017225266, -0.04647378623485565, -0.01002083346247673, -0.1461799144744873, 0.021547209471464157, -0.06823074072599411, -0.0018150662072002888, -0.01575574465095997, -0.01609095185995102, -0.010343344882130623, 0.013369297608733177, -0.04838914796710014, -0.0684647187590599, -0.04158010333776474, 0.11819078028202057, -0.19893240928649902, -0.014980253763496876, 0.09270663559436798, -0.06556807458400726, 0.07104094326496124, 0.013214444741606712, -0.02009204402565956, -0.00030637066811323166, -0.0576871782541275, -0.005104304291307926, -0.01029907539486885, 0.055738404393196106, 0.01056006271392107, -0.16530609130859375, -0.012943669222295284, 0.011537943035364151, -0.07801894843578339, -0.00986872985959053, 0.060506176203489304, -0.15149971842765808, 0.01755904033780098, 0.04909038916230202, -0.0316716693341732, -0.042480774223804474, 0.044082269072532654, 0.06414588540792465, 0.006298297084867954, 0.08224217593669891, -0.002506716176867485, 0.050161510705947876, -0.15857937932014465, -0.0588822104036808, -0.011139579117298126, 0.0062772054225206375, 0.022249266505241394, 0.0298914797604084, 0.04274576157331467, -0.0006498291622847319, 0.2072317749261856, -0.01921427994966507, 0.08931423723697662, 0.031886521726846695, 0.007115609012544155, -0.041768502444028854, 0.022412847727537155, 0.031471606343984604, 0.019901514053344727, 0.015057557262480259, 0.026106514036655426, -0.022156676277518272, -0.03663289174437523, -0.045962847769260406, 0.05662482976913452, 0.165254145860672, 0.16576410830020905, -0.04356414079666138, 0.07997338473796844, -0.16611894965171814, -0.055828582495450974, 0.035452090203762054, -0.03785412758588791, 0.03396572172641754, -0.06598521023988724, 0.03713306039571762, 0.07179754972457886, -0.10279543697834015, 0.14394572377204895, -0.06745025515556335, -0.047886140644550323, -0.021352004259824753, 
-0.12475001811981201, -0.043566346168518066, 0.02710496261715889, 0.011135171167552471, -0.10116337239742279, 0.09300880134105682, 0.11891920864582062, -0.015793997794389725, -0.018435029312968254, 0.12203468382358551, -0.061015285551548004, -0.07845333963632584, -0.023759016767144203, 0.01883169263601303, 0.023796599358320236, -0.0025288122706115246, 0.08221486210823059, 0.021795228123664856, 0.09294812381267548, 0.061900436878204346, 0.09582054615020752, 0.0589480996131897, 0.01755838468670845, -0.040776144713163376, -0.05673203617334366, -0.005734735168516636, 0.0020981431007385254, -0.046602100133895874, 0.2101350724697113, 0.0401216521859169, 0.017616840079426765, 0.004860045854002237, 0.1913803219795227, 0.00760266650468111, -0.06856685876846313, -0.1254073977470398, 0.11557086557149887, -0.006857199128717184, 0.023368563503026962, 0.037176549434661865, -0.13792049884796143, 0.02553948201239109, 0.16553017497062683, 0.11343777179718018, 0.035546258091926575, 0.009690148755908012, 0.029015567153692245, 0.027187298983335495, -0.03916216269135475, 0.033792994916439056, 0.04753471538424492, 0.17271064221858978, -0.06521783024072647, 0.06564047187566757, -0.016022536903619766, -0.020573392510414124, -0.03516871854662895, 0.09460611641407013, -0.032503753900527954, 0.016304409131407738, -0.03996933251619339, 0.10882969945669174, -0.038001518696546555, -0.29878246784210205, -0.013628439977765083, -0.10257522761821747, -0.1325601041316986, -0.025889113545417786, 0.01183897815644741, -0.02698996290564537, 0.025847969576716423, 0.04132384806871414, -0.01851915568113327, 0.20518872141838074, 0.01958274096250534, -0.08166444301605225, -0.05036287009716034, 0.0684128999710083, -0.01549177709966898, 0.2374904453754425, -0.00568377086892724, 0.07060608267784119, 0.0952097624540329, -0.015937168151140213, -0.15918119251728058, 0.02177438884973526, 0.11240047961473465, -0.02855052426457405, 0.05715987831354141, 0.1567612886428833, -0.029237087815999985, 0.10610659420490265, 
0.050766002386808395, -0.02929895557463169, 0.04725751280784607, 0.08176586031913757, 0.051229558885097504, -0.10878665745258331, 0.09083256125450134, -0.09787914156913757, 0.13908976316452026, 0.11202332377433777, -0.02729763090610504, 0.002453241962939501, -0.07579146325588226, 0.06157907098531723, -0.01688242517411709, 0.10701876878738403, 0.004528689198195934, -0.15590190887451172, 0.037918247282505035, 0.03659958392381668, 0.0668991282582283, -0.22595494985580444, -0.07028844952583313, 0.13901042938232422, -0.042970508337020874, 0.014160659164190292, 0.08317402750253677, 0.020145457237958908, 0.00821363739669323, -0.06421573460102081, -0.09974074363708496, -0.012214593589305878, 0.11914940178394318, -0.09505762904882431, -0.030972199514508247 ]
a46393bb07b83c23d4988e25416807b502a6319b
# Dataset Card for Evaluation run of Steelskull/Umbra-v2.1-MoE-4x10.7 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Steelskull/Umbra-v2.1-MoE-4x10.7](https://huggingface.co/Steelskull/Umbra-v2.1-MoE-4x10.7) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Steelskull__Umbra-v2.1-MoE-4x10.7", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T14:47:58.441546](https://huggingface.co/datasets/open-llm-leaderboard/details_Steelskull__Umbra-v2.1-MoE-4x10.7/blob/main/results_2024-01-25T14-47-58.441546.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6682580543038971, "acc_stderr": 0.03146769295303491, "acc_norm": 0.6687787171629506, "acc_norm_stderr": 0.03211181431193431, "mc1": 0.5152998776009792, "mc1_stderr": 0.0174953044731879, "mc2": 0.6656920159558114, "mc2_stderr": 0.015499118988583797 }, "harness|arc:challenge|25": { "acc": 0.6629692832764505, "acc_stderr": 0.01381347665290228, "acc_norm": 0.6911262798634812, "acc_norm_stderr": 0.013501770929344003 }, "harness|hellaswag|10": { "acc": 0.6916948814977096, "acc_stderr": 0.004608495469860379, "acc_norm": 0.8757219677355108, "acc_norm_stderr": 0.0032922425436373434 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6296296296296297, "acc_stderr": 0.041716541613545426, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.041716541613545426 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.75, "acc_stderr": 0.03523807393012047, "acc_norm": 0.75, "acc_norm_stderr": 0.03523807393012047 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.73, "acc_stderr": 0.0446196043338474, "acc_norm": 0.73, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6716981132075471, "acc_stderr": 0.02890159361241178, "acc_norm": 0.6716981132075471, "acc_norm_stderr": 0.02890159361241178 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8125, "acc_stderr": 0.032639560491693344, "acc_norm": 0.8125, "acc_norm_stderr": 0.032639560491693344 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.653179190751445, "acc_stderr": 0.036291466701596636, "acc_norm": 0.653179190751445, "acc_norm_stderr": 0.036291466701596636 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.04292346959909281, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909281 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5957446808510638, "acc_stderr": 0.03208115750788684, "acc_norm": 0.5957446808510638, "acc_norm_stderr": 0.03208115750788684 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5175438596491229, "acc_stderr": 0.04700708033551038, "acc_norm": 0.5175438596491229, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6, "acc_stderr": 0.040824829046386284, "acc_norm": 0.6, "acc_norm_stderr": 0.040824829046386284 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4894179894179894, "acc_stderr": 0.02574554227604548, "acc_norm": 0.4894179894179894, "acc_norm_stderr": 0.02574554227604548 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.40476190476190477, "acc_stderr": 0.04390259265377561, "acc_norm": 0.40476190476190477, "acc_norm_stderr": 0.04390259265377561 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7967741935483871, "acc_stderr": 0.02289168798455495, "acc_norm": 0.7967741935483871, "acc_norm_stderr": 0.02289168798455495 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5270935960591133, "acc_stderr": 
0.03512819077876106, "acc_norm": 0.5270935960591133, "acc_norm_stderr": 0.03512819077876106 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8121212121212121, "acc_stderr": 0.03050193405942914, "acc_norm": 0.8121212121212121, "acc_norm_stderr": 0.03050193405942914 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8383838383838383, "acc_stderr": 0.026225919863629276, "acc_norm": 0.8383838383838383, "acc_norm_stderr": 0.026225919863629276 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9119170984455959, "acc_stderr": 0.02045374660160103, "acc_norm": 0.9119170984455959, "acc_norm_stderr": 0.02045374660160103 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6743589743589744, "acc_stderr": 0.02375966576741229, "acc_norm": 0.6743589743589744, "acc_norm_stderr": 0.02375966576741229 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.02911661760608302, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.02911661760608302 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6890756302521008, "acc_stderr": 0.03006676158297794, "acc_norm": 0.6890756302521008, "acc_norm_stderr": 0.03006676158297794 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3708609271523179, "acc_stderr": 0.03943966699183629, "acc_norm": 0.3708609271523179, "acc_norm_stderr": 0.03943966699183629 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8403669724770643, "acc_stderr": 0.015703498348461783, "acc_norm": 0.8403669724770643, "acc_norm_stderr": 0.015703498348461783 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5462962962962963, "acc_stderr": 0.033953227263757976, "acc_norm": 0.5462962962962963, "acc_norm_stderr": 0.033953227263757976 }, 
"harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8676470588235294, "acc_stderr": 0.023784297520918863, "acc_norm": 0.8676470588235294, "acc_norm_stderr": 0.023784297520918863 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8565400843881856, "acc_stderr": 0.022818291821017012, "acc_norm": 0.8565400843881856, "acc_norm_stderr": 0.022818291821017012 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7309417040358744, "acc_stderr": 0.029763779406874965, "acc_norm": 0.7309417040358744, "acc_norm_stderr": 0.029763779406874965 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.732824427480916, "acc_stderr": 0.038808483010823944, "acc_norm": 0.732824427480916, "acc_norm_stderr": 0.038808483010823944 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8264462809917356, "acc_stderr": 0.03457272836917669, "acc_norm": 0.8264462809917356, "acc_norm_stderr": 0.03457272836917669 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8240740740740741, "acc_stderr": 0.03680918141673881, "acc_norm": 0.8240740740740741, "acc_norm_stderr": 0.03680918141673881 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7484662576687117, "acc_stderr": 0.03408997886857529, "acc_norm": 0.7484662576687117, "acc_norm_stderr": 0.03408997886857529 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.48214285714285715, "acc_stderr": 0.047427623612430116, "acc_norm": 0.48214285714285715, "acc_norm_stderr": 0.047427623612430116 }, "harness|hendrycksTest-management|5": { "acc": 0.8252427184466019, "acc_stderr": 0.03760178006026621, "acc_norm": 0.8252427184466019, "acc_norm_stderr": 0.03760178006026621 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8846153846153846, "acc_stderr": 0.020930193185179333, "acc_norm": 0.8846153846153846, "acc_norm_stderr": 0.020930193185179333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.79, "acc_stderr": 0.040936018074033256, "acc_norm": 0.79, "acc_norm_stderr": 0.040936018074033256 }, 
"harness|hendrycksTest-miscellaneous|5": { "acc": 0.8263090676883781, "acc_stderr": 0.013547415658662255, "acc_norm": 0.8263090676883781, "acc_norm_stderr": 0.013547415658662255 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7312138728323699, "acc_stderr": 0.02386800326250011, "acc_norm": 0.7312138728323699, "acc_norm_stderr": 0.02386800326250011 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.39776536312849164, "acc_stderr": 0.016369204971262975, "acc_norm": 0.39776536312849164, "acc_norm_stderr": 0.016369204971262975 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7777777777777778, "acc_stderr": 0.02380518652488815, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.02380518652488815 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7202572347266881, "acc_stderr": 0.02549425935069491, "acc_norm": 0.7202572347266881, "acc_norm_stderr": 0.02549425935069491 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7623456790123457, "acc_stderr": 0.02368359183700856, "acc_norm": 0.7623456790123457, "acc_norm_stderr": 0.02368359183700856 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5141843971631206, "acc_stderr": 0.02981549448368206, "acc_norm": 0.5141843971631206, "acc_norm_stderr": 0.02981549448368206 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5, "acc_stderr": 0.012770236105969923, "acc_norm": 0.5, "acc_norm_stderr": 0.012770236105969923 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7169117647058824, "acc_stderr": 0.027365861131513812, "acc_norm": 0.7169117647058824, "acc_norm_stderr": 0.027365861131513812 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6895424836601307, "acc_stderr": 0.018718067052623227, "acc_norm": 0.6895424836601307, "acc_norm_stderr": 0.018718067052623227 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7, "acc_stderr": 0.04389311454644286, "acc_norm": 0.7, "acc_norm_stderr": 0.04389311454644286 }, "harness|hendrycksTest-security_studies|5": { 
"acc": 0.746938775510204, "acc_stderr": 0.027833023871399687, "acc_norm": 0.746938775510204, "acc_norm_stderr": 0.027833023871399687 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8557213930348259, "acc_stderr": 0.024845753212306053, "acc_norm": 0.8557213930348259, "acc_norm_stderr": 0.024845753212306053 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.9, "acc_stderr": 0.03015113445777634, "acc_norm": 0.9, "acc_norm_stderr": 0.03015113445777634 }, "harness|hendrycksTest-virology|5": { "acc": 0.5602409638554217, "acc_stderr": 0.03864139923699122, "acc_norm": 0.5602409638554217, "acc_norm_stderr": 0.03864139923699122 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8070175438596491, "acc_stderr": 0.030267457554898458, "acc_norm": 0.8070175438596491, "acc_norm_stderr": 0.030267457554898458 }, "harness|truthfulqa:mc|0": { "mc1": 0.5152998776009792, "mc1_stderr": 0.0174953044731879, "mc2": 0.6656920159558114, "mc2_stderr": 0.015499118988583797 }, "harness|winogrande|5": { "acc": 0.8310970797158642, "acc_stderr": 0.01052998141183891 }, "harness|gsm8k|5": { "acc": 0.6868840030326004, "acc_stderr": 0.01277428566938509 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_Steelskull__Umbra-v2.1-MoE-4x10.7
[ "region:us" ]
2024-01-25T14:50:18+00:00
{"pretty_name": "Evaluation run of Steelskull/Umbra-v2.1-MoE-4x10.7", "dataset_summary": "Dataset automatically created during the evaluation run of model [Steelskull/Umbra-v2.1-MoE-4x10.7](https://huggingface.co/Steelskull/Umbra-v2.1-MoE-4x10.7) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Steelskull__Umbra-v2.1-MoE-4x10.7\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T14:47:58.441546](https://huggingface.co/datasets/open-llm-leaderboard/details_Steelskull__Umbra-v2.1-MoE-4x10.7/blob/main/results_2024-01-25T14-47-58.441546.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6682580543038971,\n \"acc_stderr\": 0.03146769295303491,\n \"acc_norm\": 0.6687787171629506,\n \"acc_norm_stderr\": 0.03211181431193431,\n \"mc1\": 0.5152998776009792,\n \"mc1_stderr\": 0.0174953044731879,\n \"mc2\": 0.6656920159558114,\n \"mc2_stderr\": 0.015499118988583797\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6629692832764505,\n \"acc_stderr\": 0.01381347665290228,\n \"acc_norm\": 0.6911262798634812,\n \"acc_norm_stderr\": 0.013501770929344003\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6916948814977096,\n \"acc_stderr\": 0.004608495469860379,\n \"acc_norm\": 0.8757219677355108,\n \"acc_norm_stderr\": 0.0032922425436373434\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6296296296296297,\n \"acc_stderr\": 0.041716541613545426,\n \"acc_norm\": 0.6296296296296297,\n \"acc_norm_stderr\": 0.041716541613545426\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.03523807393012047,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.03523807393012047\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6716981132075471,\n \"acc_stderr\": 0.02890159361241178,\n \"acc_norm\": 0.6716981132075471,\n \"acc_norm_stderr\": 0.02890159361241178\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8125,\n \"acc_stderr\": 0.032639560491693344,\n \"acc_norm\": 0.8125,\n \"acc_norm_stderr\": 0.032639560491693344\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 
0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.653179190751445,\n \"acc_stderr\": 0.036291466701596636,\n \"acc_norm\": 0.653179190751445,\n \"acc_norm_stderr\": 0.036291466701596636\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909281,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909281\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5957446808510638,\n \"acc_stderr\": 0.03208115750788684,\n \"acc_norm\": 0.5957446808510638,\n \"acc_norm_stderr\": 0.03208115750788684\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5175438596491229,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.5175438596491229,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.040824829046386284,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.040824829046386284\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4894179894179894,\n \"acc_stderr\": 0.02574554227604548,\n \"acc_norm\": 0.4894179894179894,\n \"acc_norm_stderr\": 0.02574554227604548\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.40476190476190477,\n \"acc_stderr\": 0.04390259265377561,\n \"acc_norm\": 0.40476190476190477,\n \"acc_norm_stderr\": 
0.04390259265377561\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7967741935483871,\n \"acc_stderr\": 0.02289168798455495,\n \"acc_norm\": 0.7967741935483871,\n \"acc_norm_stderr\": 0.02289168798455495\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5270935960591133,\n \"acc_stderr\": 0.03512819077876106,\n \"acc_norm\": 0.5270935960591133,\n \"acc_norm_stderr\": 0.03512819077876106\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8121212121212121,\n \"acc_stderr\": 0.03050193405942914,\n \"acc_norm\": 0.8121212121212121,\n \"acc_norm_stderr\": 0.03050193405942914\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8383838383838383,\n \"acc_stderr\": 0.026225919863629276,\n \"acc_norm\": 0.8383838383838383,\n \"acc_norm_stderr\": 0.026225919863629276\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9119170984455959,\n \"acc_stderr\": 0.02045374660160103,\n \"acc_norm\": 0.9119170984455959,\n \"acc_norm_stderr\": 0.02045374660160103\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6743589743589744,\n \"acc_stderr\": 0.02375966576741229,\n \"acc_norm\": 0.6743589743589744,\n \"acc_norm_stderr\": 0.02375966576741229\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35185185185185186,\n \"acc_stderr\": 0.02911661760608302,\n \"acc_norm\": 0.35185185185185186,\n \"acc_norm_stderr\": 0.02911661760608302\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6890756302521008,\n \"acc_stderr\": 
0.03006676158297794,\n \"acc_norm\": 0.6890756302521008,\n \"acc_norm_stderr\": 0.03006676158297794\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8403669724770643,\n \"acc_stderr\": 0.015703498348461783,\n \"acc_norm\": 0.8403669724770643,\n \"acc_norm_stderr\": 0.015703498348461783\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5462962962962963,\n \"acc_stderr\": 0.033953227263757976,\n \"acc_norm\": 0.5462962962962963,\n \"acc_norm_stderr\": 0.033953227263757976\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8676470588235294,\n \"acc_stderr\": 0.023784297520918863,\n \"acc_norm\": 0.8676470588235294,\n \"acc_norm_stderr\": 0.023784297520918863\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8565400843881856,\n \"acc_stderr\": 0.022818291821017012,\n \"acc_norm\": 0.8565400843881856,\n \"acc_norm_stderr\": 0.022818291821017012\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7309417040358744,\n \"acc_stderr\": 0.029763779406874965,\n \"acc_norm\": 0.7309417040358744,\n \"acc_norm_stderr\": 0.029763779406874965\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.732824427480916,\n \"acc_stderr\": 0.038808483010823944,\n \"acc_norm\": 0.732824427480916,\n \"acc_norm_stderr\": 0.038808483010823944\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8264462809917356,\n \"acc_stderr\": 0.03457272836917669,\n \"acc_norm\": 0.8264462809917356,\n \"acc_norm_stderr\": 0.03457272836917669\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8240740740740741,\n \"acc_stderr\": 0.03680918141673881,\n \"acc_norm\": 0.8240740740740741,\n \"acc_norm_stderr\": 0.03680918141673881\n },\n 
\"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7484662576687117,\n \"acc_stderr\": 0.03408997886857529,\n \"acc_norm\": 0.7484662576687117,\n \"acc_norm_stderr\": 0.03408997886857529\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.48214285714285715,\n \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.48214285714285715,\n \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8252427184466019,\n \"acc_stderr\": 0.03760178006026621,\n \"acc_norm\": 0.8252427184466019,\n \"acc_norm_stderr\": 0.03760178006026621\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8846153846153846,\n \"acc_stderr\": 0.020930193185179333,\n \"acc_norm\": 0.8846153846153846,\n \"acc_norm_stderr\": 0.020930193185179333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8263090676883781,\n \"acc_stderr\": 0.013547415658662255,\n \"acc_norm\": 0.8263090676883781,\n \"acc_norm_stderr\": 0.013547415658662255\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7312138728323699,\n \"acc_stderr\": 0.02386800326250011,\n \"acc_norm\": 0.7312138728323699,\n \"acc_norm_stderr\": 0.02386800326250011\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.39776536312849164,\n \"acc_stderr\": 0.016369204971262975,\n \"acc_norm\": 0.39776536312849164,\n \"acc_norm_stderr\": 0.016369204971262975\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.02380518652488815,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.02380518652488815\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7202572347266881,\n \"acc_stderr\": 0.02549425935069491,\n \"acc_norm\": 0.7202572347266881,\n \"acc_norm_stderr\": 0.02549425935069491\n },\n 
\"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7623456790123457,\n \"acc_stderr\": 0.02368359183700856,\n \"acc_norm\": 0.7623456790123457,\n \"acc_norm_stderr\": 0.02368359183700856\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5141843971631206,\n \"acc_stderr\": 0.02981549448368206,\n \"acc_norm\": 0.5141843971631206,\n \"acc_norm_stderr\": 0.02981549448368206\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.012770236105969923,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.012770236105969923\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7169117647058824,\n \"acc_stderr\": 0.027365861131513812,\n \"acc_norm\": 0.7169117647058824,\n \"acc_norm_stderr\": 0.027365861131513812\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6895424836601307,\n \"acc_stderr\": 0.018718067052623227,\n \"acc_norm\": 0.6895424836601307,\n \"acc_norm_stderr\": 0.018718067052623227\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.04389311454644286,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.04389311454644286\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.746938775510204,\n \"acc_stderr\": 0.027833023871399687,\n \"acc_norm\": 0.746938775510204,\n \"acc_norm_stderr\": 0.027833023871399687\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8557213930348259,\n \"acc_stderr\": 0.024845753212306053,\n \"acc_norm\": 0.8557213930348259,\n \"acc_norm_stderr\": 0.024845753212306053\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.03015113445777634,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.03015113445777634\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n \"acc_stderr\": 0.03864139923699122,\n \"acc_norm\": 0.5602409638554217,\n \"acc_norm_stderr\": 0.03864139923699122\n },\n 
\"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8070175438596491,\n \"acc_stderr\": 0.030267457554898458,\n \"acc_norm\": 0.8070175438596491,\n \"acc_norm_stderr\": 0.030267457554898458\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5152998776009792,\n \"mc1_stderr\": 0.0174953044731879,\n \"mc2\": 0.6656920159558114,\n \"mc2_stderr\": 0.015499118988583797\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8310970797158642,\n \"acc_stderr\": 0.01052998141183891\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6868840030326004,\n \"acc_stderr\": 0.01277428566938509\n }\n}\n```", "repo_url": "https://huggingface.co/Steelskull/Umbra-v2.1-MoE-4x10.7", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|arc:challenge|25_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|gsm8k|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hellaswag|10_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-47-58.441546.parquet", 
"**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-47-58.441546.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-47-58.441546.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-47-58.441546.parquet", 
"**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-47-58.441546.parquet", 
"**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-47-58.441546.parquet", 
"**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T14-47-58.441546.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-47-58.441546.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": 
["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-47-58.441546.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T14-47-58.441546.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["**/details_harness|winogrande|5_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T14-47-58.441546.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T14_47_58.441546", "path": ["results_2024-01-25T14-47-58.441546.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T14-47-58.441546.parquet"]}]}]}
2024-01-25T14:50:42+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Steelskull/Umbra-v2.1-MoE-4x10.7 Dataset automatically created during the evaluation run of model Steelskull/Umbra-v2.1-MoE-4x10.7 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T14:47:58.441546 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Steelskull/Umbra-v2.1-MoE-4x10.7\n\n\n\nDataset automatically created during the evaluation run of model Steelskull/Umbra-v2.1-MoE-4x10.7 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T14:47:58.441546(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Steelskull/Umbra-v2.1-MoE-4x10.7\n\n\n\nDataset automatically created during the evaluation run of model Steelskull/Umbra-v2.1-MoE-4x10.7 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T14:47:58.441546(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 193, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Steelskull/Umbra-v2.1-MoE-4x10.7\n\n\n\nDataset automatically created during the evaluation run of model Steelskull/Umbra-v2.1-MoE-4x10.7 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T14:47:58.441546(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
[ -0.06695378571748734, 0.21679699420928955, -0.0037075779400765896, 0.03721850737929344, 0.10185058414936066, 0.0028416949789971113, 0.010102259926497936, 0.11265096813440323, -0.022572318091988564, 0.19175481796264648, -0.018182219937443733, 0.09637390077114105, 0.07737284153699875, 0.12344096601009369, 0.016456838697195053, -0.13439926505088806, 0.015079410746693611, -0.07562801986932755, 0.06625177711248398, 0.08023819327354431, 0.07568841427564621, -0.07866557687520981, 0.056187137961387634, -0.047998927533626556, -0.017618386074900627, 0.00568312406539917, -0.09426827728748322, -0.025423511862754822, 0.08920173346996307, 0.08624377846717834, 0.0392201766371727, -0.009980253875255585, 0.019244737923145294, -0.23816192150115967, 0.015447765588760376, 0.09141320735216141, -0.001217246288433671, 0.0428411066532135, 0.11453045159578323, -0.07551443576812744, 0.021114610135555267, -0.08293113112449646, 0.06991132348775864, 0.044225919991731644, -0.11659397184848785, -0.15064530074596405, -0.1512240320444107, 0.02199915051460266, 0.06205619499087334, 0.03918898105621338, -0.030615318566560745, 0.16758696734905243, -0.029587512835860252, 0.03479617461562157, 0.1203937754034996, -0.12365236133337021, -0.017444446682929993, 0.04317578300833702, 0.029477359727025032, 0.07331255078315735, -0.09529312700033188, 0.003744198242202401, 0.04039378836750984, 0.05457613617181778, 0.010134809650480747, 0.007841845043003559, -0.054725758731365204, 0.021548481658101082, -0.1355799287557602, -0.10584990680217743, 0.1964983195066452, 0.00631299102678895, -0.045651309192180634, -0.17490819096565247, -0.011340022087097168, 0.03326258435845375, 0.004300021566450596, -0.04801267385482788, 0.0034844335168600082, -0.023117486387491226, 0.07096073031425476, -0.01267319731414318, -0.09953232109546661, -0.019026288762688637, 0.007778031285852194, 0.042109839618206024, 0.01174844242632389, -0.01686389558017254, 0.004335947334766388, 0.10487449169158936, -0.023901568725705147, 
-0.08525458723306656, -0.06935771554708481, -0.058709364384412766, -0.11594405770301819, -0.05782872438430786, 0.00961123313754797, -0.07111857831478119, 0.0264718160033226, 0.2296660989522934, -0.029585467651486397, 0.03587917611002922, -0.09953954070806503, 0.010939019732177258, 0.11733002215623856, 0.048797428607940674, -0.0764450803399086, -0.06889580190181732, -0.024863453581929207, 0.02549261972308159, 0.02615312486886978, -0.025900136679410934, 0.0053700897842645645, 0.05674480274319649, 0.05777440220117569, 0.11382201313972473, 0.12584805488586426, 0.03207887336611748, -0.07210203260183334, -0.027453774586319923, 0.2238161861896515, -0.14306126534938812, -0.00006210243736859411, 0.006738913245499134, -0.022782662883400917, -0.08944956213235855, 0.08203481137752533, 0.003683468559756875, -0.056096822023391724, 0.11680794507265091, -0.05725166201591492, -0.08617684990167618, -0.07037889957427979, -0.05638379603624344, 0.06585826724767685, 0.010467557236552238, -0.03032108210027218, -0.08841338008642197, -0.08367905765771866, -0.07104150950908661, 0.02291765622794628, -0.06329505145549774, -0.03715226426720619, 0.05064413323998451, -0.01384761556982994, -0.009006280452013016, -0.022000523284077644, 0.11660201102495193, -0.045626450330019, 0.028323568403720856, 0.014785593375563622, 0.0019459491595625877, 0.08697053045034409, 0.052414827048778534, -0.11879213154315948, 0.07969613373279572, -0.12209556251764297, 0.08464369922876358, -0.11213740706443787, 0.007112154737114906, -0.14352178573608398, -0.014513494446873665, -0.0467517226934433, 0.011807342059910297, 0.013445664197206497, 0.10769062489271164, -0.22415675222873688, 0.010202893987298012, 0.11365144699811935, -0.11247455328702927, -0.1142927035689354, 0.0789773091673851, -0.037894971668720245, 0.07198037952184677, 0.05563334748148918, 0.11730403453111649, 0.11949624121189117, -0.09247434884309769, -0.12386613339185715, -0.07436293363571167, -0.035255059599876404, 0.14602889120578766, 
0.07244198024272919, -0.07365533709526062, 0.11282085627317429, 0.05058174207806587, -0.012598506174981594, -0.1155223622918129, -0.01295456476509571, -0.06806880235671997, -0.024830784648656845, -0.04799084737896919, -0.06409195810556412, 0.0050408863462507725, -0.09097805619239807, -0.017525004222989082, -0.09935367852449417, 0.036410972476005554, 0.09034930914640427, -0.02141992375254631, 0.010779384523630142, -0.05643411725759506, 0.06425464153289795, -0.0044205146841704845, 0.027016527950763702, -0.2151629626750946, -0.12059663236141205, 0.03649849444627762, -0.13230715692043304, 0.04995406046509743, 0.03597387671470642, 0.01565518230199814, 0.04552866518497467, -0.014862189069390297, 0.017280282452702522, 0.009772352874279022, -0.0008178390562534332, -0.015655390918254852, -0.16290055215358734, -0.06862516701221466, -0.08428330719470978, 0.09885057061910629, -0.13968804478645325, -0.017012685537338257, 0.09318927675485611, 0.1581907570362091, -0.0015389504842460155, -0.0772847831249237, 0.07565159350633621, 0.0038660215213894844, -0.030752595514059067, -0.057967305183410645, 0.012357953935861588, -0.013097134418785572, 0.03920828551054001, 0.03219223767518997, -0.21387581527233124, -0.12172967940568924, 0.08203393965959549, 0.12513069808483124, -0.07719144970178604, -0.09803532809019089, -0.06270996481180191, -0.06706788390874863, -0.07522690296173096, -0.07783514261245728, 0.07446826249361038, 0.06460433453321457, 0.02845604158937931, -0.07452952861785889, -0.0586928054690361, 0.012866497971117496, 0.04974043369293213, -0.0856943130493164, 0.09645574539899826, 0.062352683395147324, -0.09867272526025772, 0.10922986268997192, -0.02204948663711548, 0.12922519445419312, 0.07273009419441223, 0.03543148934841156, -0.10013162344694138, -0.008789367973804474, 0.05317255109548569, 0.045270007103681564, 0.10261528939008713, -0.04184747114777565, 0.03687414526939392, 0.07737024128437042, 0.0032514745835214853, 0.053430236876010895, -0.04728246480226517, 
0.023371784016489983, 0.030148407444357872, 0.012804802507162094, 0.01679920218884945, 0.007025141268968582, 0.0015680808573961258, 0.06656806170940399, 0.042976293712854385, 0.14556379616260529, -0.01481064036488533, -0.040932781994342804, -0.10795685648918152, 0.1437760442495346, -0.08799262344837189, -0.2831345200538635, -0.16471301019191742, -0.02030043490231037, -0.024972129613161087, -0.024632738903164864, 0.06780721992254257, -0.023692460730671883, -0.09939111024141312, -0.09029578417539597, 0.05095094442367554, -0.004501170013099909, -0.11519443243741989, -0.05142777040600777, 0.060676686465740204, 0.016338765621185303, -0.16106978058815002, 0.044519346207380295, 0.04381551221013069, -0.03621872141957283, -0.002357935067266226, 0.09051556885242462, 0.15198971331119537, 0.06904608011245728, 0.057538632303476334, -0.02307538129389286, -0.006111451890319586, 0.17928054928779602, -0.09831057488918304, 0.027899345383048058, 0.12055085599422455, -0.06024731323122978, 0.07674549520015717, 0.1732827126979828, 0.00666947802528739, -0.09410018473863602, 0.042871393263339996, 0.0897563099861145, -0.06955080479383469, -0.23802393674850464, -0.1033783107995987, -0.02336839586496353, 0.0017563090659677982, 0.10395924746990204, 0.05331653729081154, -0.015120142139494419, 0.022059503942728043, -0.11624372005462646, -0.024736590683460236, -0.058993615210056305, 0.08484888821840286, 0.03928132355213165, -0.0002149121864931658, 0.052437786012887955, -0.03875170275568962, 0.032491352409124374, 0.1186249777674675, 0.04871212691068649, 0.15072375535964966, -0.0391152948141098, 0.15883685648441315, 0.0856664627790451, 0.11148812621831894, -0.034615300595760345, 0.04749725013971329, -0.004035099409520626, 0.06330154091119766, -0.008406870067119598, -0.10823681950569153, -0.03603767603635788, 0.09386371821165085, 0.016294464468955994, -0.06797805428504944, 0.011789624579250813, -0.056802764534950256, 0.045511502772569656, 0.17513050138950348, -0.015646323561668396, 
-0.16628168523311615, -0.07572025805711746, 0.05799247696995735, -0.004775094334036112, -0.08598171174526215, -0.0500452034175396, 0.0574820376932621, -0.13944676518440247, 0.028463829308748245, -0.034614890813827515, 0.08492793887853622, -0.10567368566989899, -0.018833842128515244, -0.01080441102385521, 0.05509170889854431, -0.004692187067121267, 0.12412559986114502, -0.11447757482528687, 0.10829632729291916, 0.017022045329213142, 0.037440381944179535, -0.10101212561130524, 0.0482967235147953, -0.04527928680181503, -0.0386061891913414, 0.14006610214710236, -0.01409184280782938, -0.09673916548490524, -0.0610395185649395, -0.11282623559236526, 0.007456425577402115, 0.07873988151550293, -0.10923156142234802, 0.11604564636945724, 0.023635225370526314, -0.0058126202784478664, -0.01557102333754301, 0.00007785999332554638, -0.1405109316110611, -0.22871199250221252, 0.12496715039014816, -0.1328689604997635, 0.05852406099438667, -0.05105132609605789, -0.039477135986089706, -0.044916119426488876, 0.19009838998317719, -0.09295782446861267, -0.049525536596775055, -0.13492240011692047, 0.04602248594164848, 0.18313826620578766, -0.054269976913928986, 0.04180087894201279, -0.05283857509493828, 0.18921403586864471, -0.0028196487110108137, -0.06147230416536331, -0.013846294023096561, -0.08801216632127762, -0.1585097312927246, -0.037462811917066574, 0.12417144328355789, 0.056498244404792786, 0.006059438455849886, 0.012683025561273098, 0.040205586701631546, 0.008271422237157822, -0.09577611833810806, 0.047353390604257584, 0.08133845031261444, 0.14575718343257904, 0.029624639078974724, -0.06277286261320114, -0.0986093059182167, -0.09669821709394455, -0.09274772554636002, 0.05759444460272789, 0.14995773136615753, -0.07156053185462952, 0.15429338812828064, 0.16284522414207458, -0.10761871933937073, -0.21341337263584137, -0.03757782652974129, 0.02258818969130516, -0.014269599691033363, 0.13061879575252533, -0.18886850774288177, 0.08055310696363449, 0.046122029423713684, 
-0.0065975491888821125, 0.0808485895395279, -0.22246094048023224, -0.1313045471906662, 0.008664469234645367, 0.01960679702460766, -0.22277691960334778, -0.1815016269683838, -0.1073278859257698, -0.033674679696559906, -0.13736838102340698, 0.13031262159347534, -0.04739982262253761, 0.031817030161619186, -0.007435714825987816, 0.08435215055942535, 0.05366876721382141, -0.0726117417216301, 0.14072294533252716, -0.005496060475707054, 0.009348622523248196, -0.0998491421341896, -0.010568199679255486, 0.04518710449337959, -0.0503414161503315, 0.10964594036340714, 0.03311653062701225, 0.05960748717188835, -0.07898783683776855, -0.035654596984386444, -0.04890300706028938, 0.06603415310382843, -0.07733386009931564, -0.054619740694761276, -0.057726986706256866, 0.08489236235618591, 0.07410590350627899, -0.029810748994350433, -0.012795446440577507, -0.029689356684684753, 0.05533036217093468, 0.1769949346780777, 0.12239916622638702, 0.04063330590724945, -0.1256464570760727, -0.007823165506124496, -0.01748991385102272, -0.0038454418536275625, -0.1306506246328354, 0.043518658727407455, 0.09051687270402908, 0.05149199441075325, 0.06758568435907364, -0.01985407993197441, -0.19693423807621002, 0.002633263822644949, 0.08654697984457016, -0.12118882685899734, -0.19059158861637115, 0.029762784019112587, 0.12831254303455353, -0.16921350359916687, -0.05597510561347008, 0.09232363104820251, 0.022481683641672134, -0.031119007617235184, -0.0013143457472324371, 0.07491923868656158, 0.03837823495268822, 0.09989194571971893, 0.016793590039014816, 0.056421902030706406, -0.06250396370887756, 0.11304284632205963, 0.12959982454776764, -0.13901518285274506, 0.03230547532439232, 0.05260145664215088, -0.060929685831069946, -0.07757902890443802, 0.02985295094549656, 0.013911913149058819, 0.031677909195423126, -0.04186154156923294, 0.011620928533375263, -0.0024945507757365704, 0.03692195564508438, 0.10250739753246307, 0.008933703415095806, 0.03430284187197685, 0.03416142612695694, 
-0.0003356854140292853, -0.09381510317325592, 0.09335210919380188, 0.03692026063799858, 0.03417755290865898, -0.0511942096054554, 0.03283192217350006, 0.02785116620361805, -0.010386182926595211, 0.008940592408180237, -0.031585000455379486, -0.07388368993997574, -0.013032774440944195, -0.1356201469898224, 0.030023958534002304, -0.0736217349767685, 0.003748006420210004, -0.026460466906428337, -0.0287660900503397, -0.02831113152205944, 0.013179660774767399, -0.05069366469979286, -0.06307616084814072, -0.05010366812348366, 0.11442831158638, -0.2059887796640396, -0.001945624710060656, 0.09934093803167343, -0.06319616734981537, 0.0769958645105362, 0.0031328939367085695, -0.013737085275352001, 0.01691710762679577, -0.06570646911859512, -0.011259038001298904, -0.029869012534618378, 0.04338512569665909, 0.016250573098659515, -0.16358980536460876, -0.017119238153100014, 0.005750443786382675, -0.06406951695680618, -0.0008198883733712137, 0.08183464407920837, -0.14051134884357452, 0.010545410215854645, 0.05799472704529762, -0.04892992600798607, -0.04868829622864723, 0.05291702225804329, 0.058101117610931396, 0.021489037200808525, 0.09760674834251404, -0.00281531666405499, 0.0406646728515625, -0.16647562384605408, -0.05709484964609146, -0.0031241646502166986, 0.006169222295284271, 0.022527489811182022, 0.030779685825109482, 0.0440845713019371, -0.007705674972385168, 0.2097206562757492, -0.008764056488871574, 0.08719278126955032, 0.03778518736362457, 0.005142486188560724, -0.04514804109930992, 0.03090374357998371, 0.027871472761034966, 0.002185551216825843, 0.032207511365413666, 0.021182559430599213, -0.01355635467916727, -0.05831164866685867, -0.03958610072731972, 0.0636107549071312, 0.17195218801498413, 0.17501278221607208, -0.04845862835645676, 0.08159832656383514, -0.17569205164909363, -0.07683930546045303, 0.029896225780248642, -0.03462158516049385, 0.036403581500053406, -0.07165886461734772, 0.04681410640478134, 0.09176162630319595, -0.10186519473791122, 
0.13888566195964813, -0.0769316554069519, -0.053150154650211334, -0.03734893351793289, -0.13518460094928741, -0.04954500123858452, 0.01675243303179741, 0.008735409006476402, -0.10285026580095291, 0.09188546985387802, 0.12646856904029846, -0.024612754583358765, -0.0026300461031496525, 0.10386892408132553, -0.04565516114234924, -0.0675317570567131, -0.038610007613897324, 0.00436935480684042, 0.011727589182555676, 0.018381986767053604, 0.08533064275979996, 0.03229981288313866, 0.07555672526359558, 0.0671365037560463, 0.08691313862800598, 0.043753255158662796, 0.01393604651093483, -0.054393019527196884, -0.07276231795549393, -0.009598341770470142, -0.0037823463790118694, -0.04845605418086052, 0.18130996823310852, 0.046797480434179306, 0.008146878331899643, -0.003452029312029481, 0.19536828994750977, 0.020854923874139786, -0.05835135653614998, -0.14466555416584015, 0.09485705196857452, -0.02398299053311348, 0.021220209077000618, 0.02858767844736576, -0.13807889819145203, 0.047611430287361145, 0.17885822057724, 0.11500944197177887, 0.03589245304465294, 0.00922313705086708, 0.031110849231481552, 0.02581351436674595, -0.031890325248241425, 0.051634930074214935, 0.03428220748901367, 0.17590664327144623, -0.06378186494112015, 0.05545927211642265, -0.026736319065093994, -0.017754940316081047, -0.015344411134719849, 0.10181432962417603, -0.03641515225172043, 0.031071096658706665, -0.05168912187218666, 0.11461327224969864, -0.02651635929942131, -0.27198493480682373, -0.0231146402657032, -0.10722250491380692, -0.13304349780082703, -0.019923826679587364, 0.04412013292312622, -0.051328789442777634, 0.04123266041278839, 0.030849453061819077, -0.019229557365179062, 0.1830066442489624, 0.016315147280693054, -0.07784409075975418, -0.07177423685789108, 0.06783833354711533, 0.002019510604441166, 0.24400249123573303, -0.019156066700816154, 0.061054401099681854, 0.10314244031906128, -0.015947189182043076, -0.16033488512039185, -0.018397988751530647, 0.10434253513813019, 
-0.04955652356147766, 0.05606992170214653, 0.1663418859243393, -0.04230064898729324, 0.12833066284656525, 0.04360371455550194, -0.04463537037372589, 0.04555777460336685, 0.054308321326971054, 0.044967927038669586, -0.087986059486866, 0.0818934440612793, -0.08996307104825974, 0.14751967787742615, 0.1066250428557396, -0.0280528012663126, 0.0019454159773886204, -0.07525584101676941, 0.07186464965343475, -0.02248627133667469, 0.11912375688552856, -0.0002845343842636794, -0.18646647036075592, 0.030641824007034302, 0.036062758415937424, 0.056324999779462814, -0.24343807995319366, -0.07556147873401642, 0.14049328863620758, -0.03741234540939331, 0.004008022602647543, 0.09776555746793747, 0.040365464985370636, 0.001298196380957961, -0.0680483728647232, -0.09483807533979416, -0.003995059058070183, 0.12875661253929138, -0.10891211032867432, -0.037934400141239166 ]
eada7466bf7352c25ac5c26c8409790a3350ffb0
# YE-POP (a derived dataset of Laion POP) YE-POP is a derived dataset from Laion-POP, meticulously curated and filtered to enhance the quality and utility of the original dataset. The dataset comprises 11 chunks, each containing 50,000 image URLs from Laion-POP. NSFW sorting has been used as a baseline, and human verification has been conducted to ensure the dataset's reliability. For the initial comparison, Chunk 1 has been curated with Gemini-Pro and released as part of a research work to the community. For access to other chunks generated by gemini-pro, interested parties are encouraged to contact us. The primary goal of YE-POP is to provide a dataset with improved art image descriptions while retaining the essence of Laion-POP for baseline comparisons in diffusion models and image captioning tasks. We anticipate that training multimodal models on this dataset will lead to enhanced generation capabilities. ## Dataset Details Each zip file contains predownloaded images, and the JSON file includes dictionaries of image features with the following fields: - `filename` - `url` - `cogvlm_caption` - `llava_caption` - `nsfw_prediction` - `alt_txt` - `alt_txt_similarity` - `width` - `height` - `original_width` - `original_height` - `exif` For more [detailed information](https://laion.ai/blog/laion-pop/#dataset-and-methodology) on the fields, refer to the JSON file. ## Dataset Card Authors [Yaroslav Ponomarenko]() [Ejafa Bassam]() ## Dataset Card Contact @[Peking University](https://cs.pku.edu.cn/English/Home.htm) ## Acknowledgments [Laion (Christoph Schuhmann, Peter Bevan)]() [Google Gemini-Pro](https://doi.org/10.48550/arXiv.2312.11805)
Ejafa/ye-pop
[ "size_categories:100K<n<1M", "language:en", "license:apache-2.0", "art", "region:us" ]
2024-01-25T14:56:01+00:00
{"language": ["en"], "license": "apache-2.0", "size_categories": ["100K<n<1M"], "pretty_name": "ye-pop", "tags": ["art"]}
2024-01-25T23:41:11+00:00
[]
[ "en" ]
TAGS #size_categories-100K<n<1M #language-English #license-apache-2.0 #art #region-us
# YE-POP (a derived dataset of Laion POP) YE-POP is a derived dataset from Laion-POP, meticulously curated and filtered to enhance the quality and utility of the original dataset. The dataset comprises 11 chunks, each containing 50,000 image URLs from Laion-POP. NSFW sorting has been used as a baseline, and human verification has been conducted to ensure the dataset's reliability. For the initial comparison, Chunk 1 has been curated with Gemini-Pro and released as part of a research work to the community. For access to other chunks generated by gemini-pro, interested parties are encouraged to contact us. The primary goal of YE-POP is to provide a dataset with improved art image descriptions while retaining the essence of Laion-POP for baseline comparisons in diffusion models and image captioning tasks. We anticipate that training multimodal models on this dataset will lead to enhanced generation capabilities. ## Dataset Details Each zip file contains predownloaded images, and the JSON file includes dictionaries of image features with the following fields: - 'filename' - 'url' - 'cogvlm_caption' - 'llava_caption' - 'nsfw_prediction' - 'alt_txt' - 'alt_txt_similarity' - 'width' - 'height' - 'original_width' - 'original_height' - 'exif' For more detailed information on the fields, refer to the JSON file. ## Dataset Card Authors [Yaroslav Ponomarenko]() [Ejafa Bassam]() ## Dataset Card Contact @Peking University ## Acknowledgments [Laion (Christoph Schuhmann, Peter Bevan)]() Google Gemini-Pro
[ "# YE-POP (a derived dataset of Laion POP)\nYE-POP is a derived dataset from Laion-POP, meticulously curated and filtered to enhance the quality and utility of the original dataset. The dataset comprises 11 chunks, each containing 50,000 image URLs from Laion-POP. NSFW sorting has been used as a baseline, and human verification has been conducted to ensure the dataset's reliability.\n\nFor the initial comparison, Chunk 1 has been curated with Gemini-Pro and released as part of a research work to the community. For access to other chunks generated by gemini-pro, interested parties are encouraged to contact us. The primary goal of YE-POP is to provide a dataset with improved art image descriptions while retaining the essence of Laion-POP for baseline comparisons in diffusion models and image captioning tasks. We anticipate that training multimodal models on this dataset will lead to enhanced generation capabilities.", "## Dataset Details\nEach zip file contains predownloaded images, and the JSON file includes dictionaries of image features with the following fields: \n\n- 'filename'\n- 'url'\n- 'cogvlm_caption'\n- 'llava_caption'\n- 'nsfw_prediction'\n- 'alt_txt'\n- 'alt_txt_similarity'\n- 'width'\n- 'height'\n- 'original_width'\n- 'original_height'\n- 'exif'\n\nFor more detailed information on the fields, refer to the JSON file.", "## Dataset Card Authors \n\n[Yaroslav Ponomarenko]()\n\n[Ejafa Bassam]()", "## Dataset Card Contact\n@Peking University", "## Acknowledgments\n\n[Laion (Christoph Schuhmann, Peter Bevan)]()\n\nGoogle Gemini-Pro" ]
[ "TAGS\n#size_categories-100K<n<1M #language-English #license-apache-2.0 #art #region-us \n", "# YE-POP (a derived dataset of Laion POP)\nYE-POP is a derived dataset from Laion-POP, meticulously curated and filtered to enhance the quality and utility of the original dataset. The dataset comprises 11 chunks, each containing 50,000 image URLs from Laion-POP. NSFW sorting has been used as a baseline, and human verification has been conducted to ensure the dataset's reliability.\n\nFor the initial comparison, Chunk 1 has been curated with Gemini-Pro and released as part of a research work to the community. For access to other chunks generated by gemini-pro, interested parties are encouraged to contact us. The primary goal of YE-POP is to provide a dataset with improved art image descriptions while retaining the essence of Laion-POP for baseline comparisons in diffusion models and image captioning tasks. We anticipate that training multimodal models on this dataset will lead to enhanced generation capabilities.", "## Dataset Details\nEach zip file contains predownloaded images, and the JSON file includes dictionaries of image features with the following fields: \n\n- 'filename'\n- 'url'\n- 'cogvlm_caption'\n- 'llava_caption'\n- 'nsfw_prediction'\n- 'alt_txt'\n- 'alt_txt_similarity'\n- 'width'\n- 'height'\n- 'original_width'\n- 'original_height'\n- 'exif'\n\nFor more detailed information on the fields, refer to the JSON file.", "## Dataset Card Authors \n\n[Yaroslav Ponomarenko]()\n\n[Ejafa Bassam]()", "## Dataset Card Contact\n@Peking University", "## Acknowledgments\n\n[Laion (Christoph Schuhmann, Peter Bevan)]()\n\nGoogle Gemini-Pro" ]
[ 32, 225, 129, 26, 9, 29 ]
[ "passage: TAGS\n#size_categories-100K<n<1M #language-English #license-apache-2.0 #art #region-us \n# YE-POP (a derived dataset of Laion POP)\nYE-POP is a derived dataset from Laion-POP, meticulously curated and filtered to enhance the quality and utility of the original dataset. The dataset comprises 11 chunks, each containing 50,000 image URLs from Laion-POP. NSFW sorting has been used as a baseline, and human verification has been conducted to ensure the dataset's reliability.\n\nFor the initial comparison, Chunk 1 has been curated with Gemini-Pro and released as part of a research work to the community. For access to other chunks generated by gemini-pro, interested parties are encouraged to contact us. The primary goal of YE-POP is to provide a dataset with improved art image descriptions while retaining the essence of Laion-POP for baseline comparisons in diffusion models and image captioning tasks. We anticipate that training multimodal models on this dataset will lead to enhanced generation capabilities.## Dataset Details\nEach zip file contains predownloaded images, and the JSON file includes dictionaries of image features with the following fields: \n\n- 'filename'\n- 'url'\n- 'cogvlm_caption'\n- 'llava_caption'\n- 'nsfw_prediction'\n- 'alt_txt'\n- 'alt_txt_similarity'\n- 'width'\n- 'height'\n- 'original_width'\n- 'original_height'\n- 'exif'\n\nFor more detailed information on the fields, refer to the JSON file.## Dataset Card Authors \n\n[Yaroslav Ponomarenko]()\n\n[Ejafa Bassam]()## Dataset Card Contact\n@Peking University## Acknowledgments\n\n[Laion (Christoph Schuhmann, Peter Bevan)]()\n\nGoogle Gemini-Pro" ]
[ -0.053770724684000015, 0.08728457242250443, -0.004981784150004387, 0.0298849456012249, 0.01379961334168911, 0.028809085488319397, 0.09702489525079727, 0.09200412034988403, 0.06408325582742691, 0.07507770508527756, -0.049340322613716125, -0.13375745713710785, 0.1511223465204239, 0.116416797041893, -0.00678270123898983, -0.23781482875347137, 0.05056976154446602, 0.024826504290103912, 0.03877772018313408, 0.04897066205739975, 0.12131363898515701, -0.08555968105792999, 0.07918866723775864, 0.02436774969100952, -0.05546335130929947, 0.04850902408361435, -0.04136958345770836, 0.02227860875427723, 0.048855118453502655, 0.018696365877985954, 0.08059115707874298, 0.04935362935066223, 0.04298262670636177, -0.22658349573612213, 0.033462394028902054, 0.07648171484470367, 0.02954213134944439, 0.0390070416033268, 0.1425943821668625, -0.09347856044769287, 0.11132114380598068, -0.049076665192842484, 0.031443431973457336, 0.00566410506144166, -0.05312329903244972, -0.15239141881465912, -0.09483445435762405, -0.014112232252955437, 0.10340878367424011, -0.016232477501034737, -0.03275033459067345, -0.023327244445681572, -0.05256577208638191, 0.022730277851223946, 0.17438670992851257, -0.13100944459438324, -0.03986211493611336, 0.09524166584014893, -0.020542675629258156, 0.08358307927846909, -0.05347233638167381, -0.010167865082621574, 0.037970639765262604, -0.012219625525176525, -0.012193934060633183, -0.022564338520169258, -0.10088785737752914, -0.020356545224785805, -0.13408850133419037, 0.00899453368037939, 0.2942429780960083, 0.019649522379040718, -0.05423919856548309, -0.11844457685947418, -0.04629725217819214, 0.03573836758732796, -0.04019130766391754, -0.04331766068935394, 0.04795685410499573, -0.014261043630540371, 0.11338021606206894, -0.04740522801876068, -0.12168668210506439, -0.024822598323225975, -0.06624358892440796, 0.037001192569732666, 0.05736062675714493, 0.06371871381998062, 0.03839825838804245, 0.10014187544584274, -0.08509471267461777, -0.14711681008338928, 
-0.0805782899260521, -0.0798172727227211, -0.13379529118537903, -0.046323925256729126, -0.06833894550800323, -0.1483745276927948, 0.10117518156766891, 0.1365102082490921, 0.01786477491259575, 0.05044686421751976, -0.05245474725961685, 0.03460616618394852, 0.13199006021022797, 0.1096903532743454, 0.007159295491874218, -0.07096068561077118, 0.01813548244535923, 0.07049178332090378, -0.008146976120769978, 0.013616394251585007, -0.036929577589035034, -0.03281130641698837, 0.006372420582920313, -0.03569113463163376, 0.014492311514914036, 0.0050969915464520454, -0.108628049492836, -0.024377768859267235, 0.038205791264772415, -0.14514155685901642, 0.016253827139735222, -0.03259984776377678, -0.07792332023382187, 0.11589956283569336, 0.00574553944170475, 0.02694912627339363, -0.06218023598194122, 0.11702471971511841, -0.0645533874630928, 0.006417160853743553, -0.10671890527009964, -0.07346472889184952, 0.038289669901132584, -0.10611088573932648, -0.013089779764413834, -0.09617731720209122, -0.2609191834926605, -0.030503548681735992, 0.08481181412935257, -0.06957835704088211, 0.0075340187177062035, 0.00920430663973093, 0.0482935756444931, -0.05647098273038864, 0.010495518334209919, 0.07323736697435379, -0.07102711498737335, 0.012752274982631207, -0.08570818603038788, 0.10648371279239655, 0.03164735063910484, 0.02005656808614731, -0.0921277180314064, 0.0238080695271492, -0.16921328008174896, 0.11189554631710052, -0.15038512647151947, -0.13320615887641907, -0.12301687896251678, -0.011998532339930534, -0.13125678896903992, 0.008099576458334923, 0.007941661402583122, 0.09873002767562866, -0.2191951423883438, -0.01961570233106613, -0.005499264225363731, -0.16013164818286896, -0.04325779899954796, 0.11713696271181107, -0.047085151076316833, 0.0556555837392807, 0.036359261721372604, 0.2668495774269104, 0.08131749927997589, -0.1153600737452507, -0.08296732604503632, 0.019808700308203697, 0.041744165122509, 0.16551396250724792, 0.06459935009479523, -0.038747671991586685, 
0.07555430382490158, 0.023034729063510895, -0.036158591508865356, -0.04986089840531349, -0.027871616184711456, -0.0056616272777318954, -0.023065803572535515, -0.009473913349211216, 0.006205594167113304, -0.026081211864948273, -0.03772474080324173, 0.004361399449408054, -0.04190568998456001, 0.010302604176104069, 0.08010529726743698, -0.05388009175658226, 0.030225403606891632, -0.026049070060253143, 0.12051479518413544, -0.07694683223962784, -0.01275054644793272, -0.13819633424282074, -0.12211236357688904, 0.08047617226839066, -0.07777474820613861, 0.027161048725247383, 0.07045882195234299, -0.0032380863558501005, 0.026954516768455505, -0.009504045359790325, 0.0009086215286515653, -0.050767142325639725, -0.026439504697918892, -0.0006377192330546677, -0.10154549032449722, -0.06815318763256073, -0.022194046527147293, 0.0786765068769455, -0.03903977945446968, -0.024407628923654556, 0.08344945311546326, 0.17255134880542755, 0.024746712297201157, -0.04443256929516792, 0.0006947924266569316, -0.006763021927326918, -0.04037763178348541, -0.03803214058279991, -0.04469337686896324, 0.0024596620351076126, 0.0732647255063057, 0.0630873441696167, 0.04785929247736931, -0.015112487599253654, 0.09770350903272629, 0.03269350156188011, -0.0286543071269989, -0.06832204759120941, -0.05332278832793236, -0.0321635939180851, -0.1690984070301056, -0.07016587257385254, 0.0919087827205658, 0.035460326820611954, 0.09483148902654648, -0.1382489949464798, -0.04619414359331131, 0.018132537603378296, -0.05876217037439346, -0.000183127325726673, -0.0011353902518749237, 0.12002331763505936, -0.03719457983970642, 0.10571768879890442, -0.028569409623742104, -0.01417711190879345, 0.1450956016778946, -0.05551309138536453, -0.12951436638832092, 0.01781868189573288, -0.010717513039708138, -0.08509036898612976, 0.17895080149173737, -0.022697506472468376, 0.05295209586620331, 0.09591042995452881, 0.05152367800474167, 0.09018497169017792, -0.10739585757255554, 0.06470266729593277, 0.03060801513493061, 
-0.057295460253953934, 0.023114100098609924, 0.05603540316224098, 0.02738020196557045, 0.09205086529254913, -0.058596666902303696, 0.06945428252220154, -0.08861951529979706, -0.04855608567595482, -0.0877332091331482, 0.1254740208387375, -0.08737029880285263, -0.16564303636550903, -0.11164729297161102, -0.026350127533078194, -0.09288756549358368, 0.0012232380686327815, 0.003239141311496496, 0.017429478466510773, -0.10643765330314636, -0.10838554054498672, 0.12224647402763367, -0.035643480718135834, -0.019119303673505783, -0.1101149320602417, -0.036688461899757385, -0.02999924123287201, -0.11332724243402481, -0.0018400541739538312, 0.0013928881380707026, -0.00014945538714528084, 0.0435635969042778, -0.027474815025925636, 0.06885244697332382, 0.04104015231132507, -0.04127492383122444, 0.015621541067957878, -0.03163645416498184, 0.19887249171733856, -0.06687910854816437, 0.1784645915031433, 0.16321074962615967, -0.017824767157435417, 0.05855593457818031, 0.10420363396406174, -0.005173059646040201, -0.0556139275431633, 0.015937428921461105, 0.06497883051633835, -0.012817895039916039, -0.22771520912647247, -0.08402945101261139, -0.0624748170375824, -0.030617350712418556, 0.057357046753168106, 0.0014897605869919062, 0.011658960953354836, 0.11336145550012589, -0.06948210299015045, 0.04156621918082237, 0.04600627347826958, 0.06568785756826401, 0.1483047902584076, 0.0026152816135436296, 0.08612867444753647, -0.04039844125509262, 0.018454955890774727, 0.08696070313453674, 0.05972393974661827, 0.2775686979293823, -0.08209042251110077, 0.02226877771317959, 0.08255138248205185, 0.03924637660384178, 0.05650368332862854, 0.032439813017845154, -0.03033393621444702, 0.045122358947992325, -0.008702710270881653, -0.03884070739150047, 0.020171549171209335, 0.06739892810583115, -0.02128412388265133, -0.026520634070038795, 0.016222964972257614, -0.022206537425518036, 0.03402496501803398, 0.1829146444797516, 0.05824899300932884, -0.15171606838703156, 0.03303113207221031, 
0.04371285066008568, -0.01760724186897278, -0.08316739648580551, 0.0069152251817286015, 0.11651119589805603, -0.08978976309299469, 0.09106283634901047, -0.0025273868814110756, 0.07820867002010345, -0.07800564169883728, -0.06234486401081085, 0.07673230022192001, 0.05529534071683884, -0.0506887324154377, 0.04855106398463249, -0.08121500164270401, 0.12070836871862411, 0.03463399410247803, 0.014727097004652023, -0.03483598306775093, 0.05552093684673309, -0.03915327414870262, 0.04368072375655174, 0.06898800283670425, 0.017067842185497284, -0.06949479132890701, -0.032552074640989304, -0.04744439944624901, 0.058519478887319565, 0.08880548924207687, -0.07313759624958038, 0.06597009301185608, -0.018053563311696053, 0.027610255405306816, -0.07085607945919037, 0.06883316487073898, -0.1335056871175766, -0.1934327483177185, 0.02326153591275215, -0.058591000735759735, 0.022598667070269585, -0.05743517726659775, -0.025763345882296562, -0.1053779199719429, 0.06336769461631775, -0.03151100128889084, -0.12953034043312073, -0.10530608147382736, 0.08272077888250351, 0.1581922322511673, 0.010182810947299004, 0.05738699808716774, -0.01941865123808384, 0.1495761275291443, -0.08628812432289124, -0.14106647670269012, -0.06502418220043182, -0.052541062235832214, -0.08557208627462387, -0.08638884872198105, 0.17392750084400177, 0.03787650540471077, 0.0032002690713852644, 0.03632819652557373, 0.06206643953919411, -0.02349795028567314, -0.05176988244056702, 0.030257420614361763, 0.12252263724803925, 0.08734140545129776, 0.1355990171432495, -0.023223398253321648, -0.05621231347322464, -0.024282166734337807, -0.0061531998217105865, -0.0528661385178566, 0.16635924577713013, -0.059077680110931396, 0.04420076310634613, -0.02667999267578125, -0.1174163743853569, -0.15540307760238647, 0.08328627049922943, 0.1368933767080307, 0.034739743918180466, -0.014888429082930088, -0.17499017715454102, 0.11664538085460663, 0.10960293561220169, 0.0028464547358453274, 0.08957356959581375, -0.22773557901382446, 
-0.08437670022249222, -0.012090501375496387, -0.001368903205730021, 0.1058751791715622, -0.10314388573169708, -0.02624582126736641, -0.01803925447165966, -0.18336009979248047, 0.06733813136816025, 0.0199092086404562, 0.03603415563702583, -0.013090546242892742, 0.04681119695305824, 0.051251187920570374, -0.0008241155301220715, 0.18748266994953156, 0.06374017149209976, 0.0698494240641594, -0.013201157562434673, 0.1270950883626938, 0.0024222861975431442, -0.0020237101707607508, 0.1429751217365265, 0.045409563928842545, 0.03132260963320732, -0.06891050189733505, -0.04988150671124458, -0.012279031798243523, 0.094954714179039, -0.004600998014211655, -0.07371034473180771, -0.14676477015018463, 0.0401202030479908, 0.05222519487142563, 0.028271546587347984, -0.0022225312422960997, -0.053542498499155045, -0.04550847038626671, 0.06116730347275734, 0.08184633404016495, -0.03840343654155731, -0.17201213538646698, -0.0031400644220411777, -0.03506985679268837, 0.08515487611293793, -0.18039295077323914, 0.03827983886003494, 0.12983064353466034, 0.029263919219374657, 0.08507409691810608, 0.011834208853542805, -0.12366505712270737, -0.01004781387746334, 0.0727531760931015, -0.10541344434022903, -0.18636606633663177, -0.052516013383865356, -0.07463494688272476, -0.06352847814559937, -0.020379822701215744, 0.11603689193725586, -0.024120086804032326, -0.01871717907488346, 0.014078930020332336, 0.036344218999147415, 0.013144169934093952, 0.11645378917455673, -0.009140117093920708, 0.020197898149490356, -0.048965681344270706, 0.11953943222761154, 0.08045580983161926, -0.08538803458213806, -0.025398507714271545, -0.039687469601631165, -0.059294361621141434, -0.009051872417330742, -0.07414066046476364, 0.060498375445604324, 0.0815923735499382, -0.05558596923947334, -0.02405596151947975, -0.10941088199615479, -0.00708914827555418, 0.11792819201946259, 0.06577063351869583, 0.03324173018336296, 0.04772757738828659, -0.006631011608988047, -0.07340475916862488, 0.08413258194923401, 
0.10407975316047668, 0.14262956380844116, -0.09566424787044525, -0.03176039829850197, -0.03562189266085625, -0.0248415544629097, -0.010939579457044601, -0.016988562420010567, -0.025475161150097847, -0.03790302202105522, -0.11117876321077347, 0.03781993314623833, -0.056806132197380066, -0.038487039506435394, 0.035185445100069046, -0.04768501594662666, -0.043203577399253845, 0.027768323197960854, -0.03100486844778061, -0.005312513560056686, -0.03361348435282707, 0.09556826949119568, -0.08343036472797394, -0.006354214623570442, 0.0851479023694992, -0.10924828052520752, 0.05590866133570671, -0.029322991147637367, -0.012650646269321442, -0.01026544626802206, -0.10041884332895279, -0.028317775577306747, 0.08123359829187393, 0.10983727872371674, -0.012762324884533882, -0.04464671388268471, 0.05238426476716995, 0.04085014760494232, -0.023446224629878998, -0.0154367471113801, 0.04829167574644089, -0.11149070411920547, 0.06128473952412605, -0.08534390479326248, 0.009415430948138237, -0.05728515610098839, 0.011862234212458134, 0.11394750326871872, 0.17818671464920044, 0.11415325105190277, -0.046837594360113144, 0.03489626571536064, -0.2005407065153122, -0.026850517839193344, 0.013970773667097092, 0.00091222720220685, 0.06686908006668091, -0.06353624165058136, 0.0529668964445591, -0.03375698998570442, 0.2582494020462036, 0.1411614716053009, 0.011395620182156563, -0.008376136422157288, 0.10669837892055511, 0.09405361115932465, 0.07469431310892105, 0.10427506268024445, -0.012439101003110409, 0.007425838150084019, -0.03794323652982712, 0.002569457283243537, 0.006879466585814953, 0.04945249482989311, 0.0588262565433979, 0.15357474982738495, -0.07387310266494751, 0.013006431050598621, -0.005782127380371094, -0.10749324411153793, 0.1134817898273468, 0.11435363441705704, -0.003777341451495886, 0.06054580584168434, 0.023651938885450363, 0.0427602156996727, 0.08416058123111725, -0.20152732729911804, 0.08777433633804321, 0.051111720502376556, -0.040363773703575134, -0.06924979388713837, 
-0.17942458391189575, -0.08392079919576645, -0.13253535330295563, 0.01890113577246666, -0.15317371487617493, 0.056978240609169006, 0.08075518906116486, 0.0018560049356892705, -0.016159744933247566, 0.04843834415078163, -0.09124664217233658, -0.13448506593704224, 0.04845491051673889, 0.012324575334787369, -0.01689041592180729, 0.035012874752283096, -0.044879697263240814, 0.07433989644050598, 0.035617005079984665, 0.01479592826217413, 0.003849914064630866, 0.09906874597072601, 0.008708562701940536, -0.053123168647289276, -0.0031505359802395105, 0.00042001024121418595, -0.09190636873245239, -0.029350299388170242, 0.08058828115463257, 0.0725845918059349, 0.014665158465504646, 0.005379623267799616, 0.1728506237268448, 0.030477631837129593, -0.10837915539741516, -0.18031930923461914, -0.05531615763902664, -0.02592283859848976, 0.0519120991230011, 0.07147276401519775, -0.11748290061950684, -0.03910720720887184, 0.1365143358707428, 0.08614928275346756, -0.01477723941206932, -0.0366227924823761, 0.0052525345236063, 0.000731457257643342, 0.017663855105638504, 0.03507458046078682, -0.001009894534945488, 0.1816394031047821, -0.020262043923139572, 0.014286112040281296, -0.06337394565343857, -0.008537541143596172, -0.04696889966726303, 0.134295254945755, 0.021448837593197823, -0.0768580213189125, -0.0681586042046547, 0.09726092964410782, 0.013806035742163658, -0.14607049524784088, 0.039580006152391434, -0.07328130304813385, -0.10102131217718124, -0.011304670013487339, 0.03278099000453949, -0.05165654793381691, -0.022374914959073067, -0.06701366603374481, 0.011779943481087685, 0.16576352715492249, 0.00032088003354147077, -0.10124436765909195, -0.0321645624935627, 0.05078642815351486, -0.18000563979148865, 0.11322867125272751, 0.030784670263528824, 0.15791188180446625, -0.0007614162750542164, 0.050546225160360336, -0.0686030387878418, 0.0027462292928248644, 0.03501199558377266, -0.023178325966000557, -0.0035393123980611563, 0.08727142214775085, 0.024789229035377502, 
0.04162898659706116, 0.06551270186901093, -0.051060114055871964, 0.04997740313410759, 0.04096461087465286, -0.06973380595445633, -0.09421690553426743, 0.050708819180727005, -0.11791829019784927, 0.08541305363178253, 0.08824986219406128, 0.0008561418508179486, 0.018528440967202187, -0.06166877970099449, 0.046761032193899155, 0.024714890867471695, 0.14042235910892487, 0.03797684237360954, -0.07108563929796219, -0.0280457716435194, -0.11896374821662903, -0.016022179275751114, -0.02633914351463318, -0.00006866897456347942, -0.013779391534626484, -0.03706216439604759, -0.0046689556911587715, 0.1097463071346283, 0.04880591109395027, 0.015947945415973663, -0.04710080102086067, -0.15546640753746033, 0.056642331182956696, 0.05991959199309349, -0.029107805341482162, -0.0039647300727665424 ]
714599d3f1026a3e560d1dde2a285c396347a5b4
# Dataset Card for Evaluation run of nbeerbower/SuperBruphin-3x7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [nbeerbower/SuperBruphin-3x7B](https://huggingface.co/nbeerbower/SuperBruphin-3x7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_nbeerbower__SuperBruphin-3x7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T14:54:12.295884](https://huggingface.co/datasets/open-llm-leaderboard/details_nbeerbower__SuperBruphin-3x7B/blob/main/results_2024-01-25T14-54-12.295884.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6511850889407919, "acc_stderr": 0.03204603027557277, "acc_norm": 0.6508528897499605, "acc_norm_stderr": 0.03270803997064092, "mc1": 0.5079559363525091, "mc1_stderr": 0.01750128507455182, "mc2": 0.6684621740241807, "mc2_stderr": 0.015158018152380857 }, "harness|arc:challenge|25": { "acc": 0.689419795221843, "acc_stderr": 0.013522292098053064, "acc_norm": 0.71160409556314, "acc_norm_stderr": 0.01323839442242818 }, "harness|hellaswag|10": { "acc": 0.6996614220274846, "acc_stderr": 0.004574683373821049, "acc_norm": 0.877414857598088, "acc_norm_stderr": 0.0032729014349397716 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6518518518518519, "acc_stderr": 0.041153246103369526, "acc_norm": 0.6518518518518519, "acc_norm_stderr": 0.041153246103369526 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7039473684210527, "acc_stderr": 0.03715062154998904, "acc_norm": 0.7039473684210527, "acc_norm_stderr": 0.03715062154998904 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7056603773584905, "acc_stderr": 0.02804918631569525, "acc_norm": 0.7056603773584905, "acc_norm_stderr": 0.02804918631569525 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.75, "acc_stderr": 0.03621034121889507, "acc_norm": 0.75, "acc_norm_stderr": 0.03621034121889507 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956911, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956911 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4215686274509804, "acc_stderr": 0.04913595201274498, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.04913595201274498 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.79, "acc_stderr": 0.04093601807403326, "acc_norm": 0.79, "acc_norm_stderr": 0.04093601807403326 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.574468085106383, "acc_stderr": 0.03232146916224468, "acc_norm": 0.574468085106383, "acc_norm_stderr": 0.03232146916224468 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5517241379310345, "acc_stderr": 0.04144311810878152, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.04144311810878152 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42063492063492064, "acc_stderr": 0.025424835086924006, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.025424835086924006 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677172, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7774193548387097, "acc_stderr": 0.023664216671642518, "acc_norm": 0.7774193548387097, "acc_norm_stderr": 0.023664216671642518 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 
0.5024630541871922, "acc_stderr": 0.035179450386910616, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.035179450386910616 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.032568666616811015, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.032568666616811015 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8131313131313131, "acc_stderr": 0.027772533334218964, "acc_norm": 0.8131313131313131, "acc_norm_stderr": 0.027772533334218964 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9067357512953368, "acc_stderr": 0.02098685459328973, "acc_norm": 0.9067357512953368, "acc_norm_stderr": 0.02098685459328973 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6717948717948717, "acc_stderr": 0.023807633198657266, "acc_norm": 0.6717948717948717, "acc_norm_stderr": 0.023807633198657266 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.02874204090394848, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.02874204090394848 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6764705882352942, "acc_stderr": 0.03038835355188679, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.03038835355188679 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3708609271523179, "acc_stderr": 0.03943966699183629, "acc_norm": 0.3708609271523179, "acc_norm_stderr": 0.03943966699183629 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8385321100917431, "acc_stderr": 0.015776239256163248, "acc_norm": 0.8385321100917431, "acc_norm_stderr": 0.015776239256163248 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5, "acc_stderr": 0.034099716973523674, "acc_norm": 0.5, "acc_norm_stderr": 0.034099716973523674 }, 
"harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8578431372549019, "acc_stderr": 0.0245098039215686, "acc_norm": 0.8578431372549019, "acc_norm_stderr": 0.0245098039215686 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7932489451476793, "acc_stderr": 0.026361651668389094, "acc_norm": 0.7932489451476793, "acc_norm_stderr": 0.026361651668389094 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.03102441174057221, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.03102441174057221 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228732, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228732 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252626, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252626 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.754601226993865, "acc_stderr": 0.03380939813943354, "acc_norm": 0.754601226993865, "acc_norm_stderr": 0.03380939813943354 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.44642857142857145, "acc_stderr": 0.04718471485219588, "acc_norm": 0.44642857142857145, "acc_norm_stderr": 0.04718471485219588 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8675213675213675, "acc_stderr": 0.022209309073165612, "acc_norm": 0.8675213675213675, "acc_norm_stderr": 0.022209309073165612 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768078 }, 
"harness|hendrycksTest-miscellaneous|5": { "acc": 0.8314176245210728, "acc_stderr": 0.013387895731543604, "acc_norm": 0.8314176245210728, "acc_norm_stderr": 0.013387895731543604 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7369942196531792, "acc_stderr": 0.023703099525258176, "acc_norm": 0.7369942196531792, "acc_norm_stderr": 0.023703099525258176 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4, "acc_stderr": 0.016384638410380823, "acc_norm": 0.4, "acc_norm_stderr": 0.016384638410380823 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7189542483660131, "acc_stderr": 0.025738854797818733, "acc_norm": 0.7189542483660131, "acc_norm_stderr": 0.025738854797818733 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7170418006430869, "acc_stderr": 0.025583062489984813, "acc_norm": 0.7170418006430869, "acc_norm_stderr": 0.025583062489984813 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7592592592592593, "acc_stderr": 0.023788583551658533, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.023788583551658533 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4645390070921986, "acc_stderr": 0.029752389657427047, "acc_norm": 0.4645390070921986, "acc_norm_stderr": 0.029752389657427047 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46740547588005216, "acc_stderr": 0.012743072942653349, "acc_norm": 0.46740547588005216, "acc_norm_stderr": 0.012743072942653349 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6727941176470589, "acc_stderr": 0.028501452860396556, "acc_norm": 0.6727941176470589, "acc_norm_stderr": 0.028501452860396556 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6683006535947712, "acc_stderr": 0.01904748523936038, "acc_norm": 0.6683006535947712, "acc_norm_stderr": 0.01904748523936038 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, 
"harness|hendrycksTest-security_studies|5": { "acc": 0.7387755102040816, "acc_stderr": 0.028123429335142773, "acc_norm": 0.7387755102040816, "acc_norm_stderr": 0.028123429335142773 }, "harness|hendrycksTest-sociology|5": { "acc": 0.845771144278607, "acc_stderr": 0.025538433368578337, "acc_norm": 0.845771144278607, "acc_norm_stderr": 0.025538433368578337 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.038823108508905954, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.038823108508905954 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.5079559363525091, "mc1_stderr": 0.01750128507455182, "mc2": 0.6684621740241807, "mc2_stderr": 0.015158018152380857 }, "harness|winogrande|5": { "acc": 0.8153117600631413, "acc_stderr": 0.010905978112156878 }, "harness|gsm8k|5": { "acc": 0.7065959059893859, "acc_stderr": 0.01254183081546149 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_nbeerbower__SuperBruphin-3x7B
[ "region:us" ]
2024-01-25T14:56:31+00:00
{"pretty_name": "Evaluation run of nbeerbower/SuperBruphin-3x7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [nbeerbower/SuperBruphin-3x7B](https://huggingface.co/nbeerbower/SuperBruphin-3x7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_nbeerbower__SuperBruphin-3x7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T14:54:12.295884](https://huggingface.co/datasets/open-llm-leaderboard/details_nbeerbower__SuperBruphin-3x7B/blob/main/results_2024-01-25T14-54-12.295884.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6511850889407919,\n \"acc_stderr\": 0.03204603027557277,\n \"acc_norm\": 0.6508528897499605,\n \"acc_norm_stderr\": 0.03270803997064092,\n \"mc1\": 0.5079559363525091,\n \"mc1_stderr\": 0.01750128507455182,\n \"mc2\": 0.6684621740241807,\n \"mc2_stderr\": 0.015158018152380857\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.689419795221843,\n \"acc_stderr\": 0.013522292098053064,\n \"acc_norm\": 0.71160409556314,\n \"acc_norm_stderr\": 0.01323839442242818\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6996614220274846,\n \"acc_stderr\": 0.004574683373821049,\n \"acc_norm\": 0.877414857598088,\n \"acc_norm_stderr\": 0.0032729014349397716\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6518518518518519,\n \"acc_stderr\": 0.041153246103369526,\n \"acc_norm\": 0.6518518518518519,\n \"acc_norm_stderr\": 0.041153246103369526\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7056603773584905,\n \"acc_stderr\": 0.02804918631569525,\n \"acc_norm\": 0.7056603773584905,\n \"acc_norm_stderr\": 0.02804918631569525\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.03621034121889507\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 
0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.04093601807403326,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.04093601807403326\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.574468085106383,\n \"acc_stderr\": 0.03232146916224468,\n \"acc_norm\": 0.574468085106383,\n \"acc_norm_stderr\": 0.03232146916224468\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878152,\n \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878152\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.025424835086924006,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.025424835086924006\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.04463112720677172,\n 
\"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7774193548387097,\n \"acc_stderr\": 0.023664216671642518,\n \"acc_norm\": 0.7774193548387097,\n \"acc_norm_stderr\": 0.023664216671642518\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.035179450386910616,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.035179450386910616\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.032568666616811015,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.032568666616811015\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8131313131313131,\n \"acc_stderr\": 0.027772533334218964,\n \"acc_norm\": 0.8131313131313131,\n \"acc_norm_stderr\": 0.027772533334218964\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9067357512953368,\n \"acc_stderr\": 0.02098685459328973,\n \"acc_norm\": 0.9067357512953368,\n \"acc_norm_stderr\": 0.02098685459328973\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6717948717948717,\n \"acc_stderr\": 0.023807633198657266,\n \"acc_norm\": 0.6717948717948717,\n \"acc_norm_stderr\": 0.023807633198657266\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.02874204090394848,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.02874204090394848\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.03038835355188679,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.03038835355188679\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8385321100917431,\n \"acc_stderr\": 0.015776239256163248,\n \"acc_norm\": 0.8385321100917431,\n \"acc_norm_stderr\": 0.015776239256163248\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.034099716973523674,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.034099716973523674\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8578431372549019,\n \"acc_stderr\": 0.0245098039215686,\n \"acc_norm\": 0.8578431372549019,\n \"acc_norm_stderr\": 0.0245098039215686\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7932489451476793,\n \"acc_stderr\": 0.026361651668389094,\n \"acc_norm\": 0.7932489451476793,\n \"acc_norm_stderr\": 0.026361651668389094\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.03102441174057221,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.03102441174057221\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228732,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228732\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252626,\n \"acc_norm\": 0.7685185185185185,\n 
\"acc_norm_stderr\": 0.04077494709252626\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.754601226993865,\n \"acc_stderr\": 0.03380939813943354,\n \"acc_norm\": 0.754601226993865,\n \"acc_norm_stderr\": 0.03380939813943354\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n \"acc_stderr\": 0.04718471485219588,\n \"acc_norm\": 0.44642857142857145,\n \"acc_norm_stderr\": 0.04718471485219588\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8675213675213675,\n \"acc_stderr\": 0.022209309073165612,\n \"acc_norm\": 0.8675213675213675,\n \"acc_norm_stderr\": 0.022209309073165612\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8314176245210728,\n \"acc_stderr\": 0.013387895731543604,\n \"acc_norm\": 0.8314176245210728,\n \"acc_norm_stderr\": 0.013387895731543604\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7369942196531792,\n \"acc_stderr\": 0.023703099525258176,\n \"acc_norm\": 0.7369942196531792,\n \"acc_norm_stderr\": 0.023703099525258176\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.016384638410380823,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.016384638410380823\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7189542483660131,\n \"acc_stderr\": 0.025738854797818733,\n \"acc_norm\": 0.7189542483660131,\n \"acc_norm_stderr\": 0.025738854797818733\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7170418006430869,\n \"acc_stderr\": 0.025583062489984813,\n \"acc_norm\": 0.7170418006430869,\n \"acc_norm_stderr\": 
0.025583062489984813\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.023788583551658533,\n \"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.023788583551658533\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4645390070921986,\n \"acc_stderr\": 0.029752389657427047,\n \"acc_norm\": 0.4645390070921986,\n \"acc_norm_stderr\": 0.029752389657427047\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46740547588005216,\n \"acc_stderr\": 0.012743072942653349,\n \"acc_norm\": 0.46740547588005216,\n \"acc_norm_stderr\": 0.012743072942653349\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6727941176470589,\n \"acc_stderr\": 0.028501452860396556,\n \"acc_norm\": 0.6727941176470589,\n \"acc_norm_stderr\": 0.028501452860396556\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6683006535947712,\n \"acc_stderr\": 0.01904748523936038,\n \"acc_norm\": 0.6683006535947712,\n \"acc_norm_stderr\": 0.01904748523936038\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.028123429335142773,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.028123429335142773\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n \"acc_stderr\": 0.025538433368578337,\n \"acc_norm\": 0.845771144278607,\n \"acc_norm_stderr\": 0.025538433368578337\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.038823108508905954,\n \"acc_norm\": 0.536144578313253,\n 
\"acc_norm_stderr\": 0.038823108508905954\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5079559363525091,\n \"mc1_stderr\": 0.01750128507455182,\n \"mc2\": 0.6684621740241807,\n \"mc2_stderr\": 0.015158018152380857\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8153117600631413,\n \"acc_stderr\": 0.010905978112156878\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7065959059893859,\n \"acc_stderr\": 0.01254183081546149\n }\n}\n```", "repo_url": "https://huggingface.co/nbeerbower/SuperBruphin-3x7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|arc:challenge|25_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|gsm8k|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hellaswag|10_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-54-12.295884.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-54-12.295884.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-54-12.295884.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-54-12.295884.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-54-12.295884.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-54-12.295884.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T14-54-12.295884.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T14-54-12.295884.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["**/details_harness|winogrande|5_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T14-54-12.295884.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T14_54_12.295884", "path": ["results_2024-01-25T14-54-12.295884.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T14-54-12.295884.parquet"]}]}]}
2024-01-25T14:56:52+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of nbeerbower/SuperBruphin-3x7B Dataset automatically created during the evaluation run of model nbeerbower/SuperBruphin-3x7B on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T14:54:12.295884(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of nbeerbower/SuperBruphin-3x7B\n\n\n\nDataset automatically created during the evaluation run of model nbeerbower/SuperBruphin-3x7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T14:54:12.295884(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of nbeerbower/SuperBruphin-3x7B\n\n\n\nDataset automatically created during the evaluation run of model nbeerbower/SuperBruphin-3x7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T14:54:12.295884(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 189, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of nbeerbower/SuperBruphin-3x7B\n\n\n\nDataset automatically created during the evaluation run of model nbeerbower/SuperBruphin-3x7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T14:54:12.295884(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
[ -0.04749538376927376, 0.21586602926254272, -0.004761763382703066, 0.042484864592552185, 0.08146079629659653, -0.012888228520751, 0.030345117673277855, 0.10553479939699173, 0.011577442288398743, 0.18139801919460297, -0.023122314363718033, 0.09474338591098785, 0.07027062773704529, 0.1345532089471817, 0.019535362720489502, -0.14430786669254303, 0.018147123977541924, -0.08164426684379578, 0.06830064952373505, 0.07935027033090591, 0.07163544744253159, -0.0888160914182663, 0.06222280487418175, -0.03400195762515068, 0.013910941779613495, -0.011486689560115337, -0.08450878411531448, -0.0357099287211895, 0.09413478523492813, 0.10837177187204361, 0.03512236103415489, -0.007524710148572922, 0.0182608924806118, -0.2617698609828949, 0.013918126001954079, 0.0985456258058548, -0.007977640256285667, 0.03919545188546181, 0.12941470742225647, -0.06522879004478455, 0.06929421424865723, -0.04665375128388405, 0.07112716138362885, 0.058571964502334595, -0.11959536373615265, -0.1389516144990921, -0.1395074427127838, -0.0009893079986795783, 0.06785397231578827, 0.032153233885765076, -0.023184332996606827, 0.14644880592823029, -0.049140118062496185, 0.04724656790494919, 0.13213862478733063, -0.11717916280031204, -0.02544599585235119, 0.06149723753333092, 0.005172311794012785, 0.06890958547592163, -0.08704398572444916, -0.015747984871268272, 0.03515114262700081, 0.052789006382226944, 0.01983814872801304, 0.010172453708946705, -0.03025950863957405, 0.0179753415286541, -0.15062151849269867, -0.12267201393842697, 0.14498522877693176, 0.022073140367865562, -0.043604664504528046, -0.1827564537525177, -0.0230790413916111, 0.020033355802297592, 0.004108411725610495, -0.03538797050714493, -0.0005002339603379369, -0.029475685209035873, 0.08251696825027466, -0.012856821529567242, -0.09152316302061081, -0.02305704914033413, 0.005075550638139248, 0.06685149669647217, 0.018606796860694885, -0.010260244831442833, 0.014646326191723347, 0.10925013571977615, -0.014062108471989632, -0.07588206231594086, 
-0.06910926103591919, -0.06287898123264313, -0.11311467736959457, -0.03837059438228607, 0.013644052669405937, -0.0726197361946106, 0.03333311155438423, 0.2373596876859665, -0.012213369831442833, 0.028131548315286636, -0.10323529690504074, 0.015943516045808792, 0.12158843129873276, 0.04462720453739166, -0.06971763074398041, -0.0589832179248333, -0.021547727286815643, 0.027950270101428032, 0.03779156506061554, -0.02442270889878273, 0.014014368876814842, 0.06726519018411636, 0.029388833791017532, 0.10479686409235, 0.12543445825576782, 0.03742499649524689, -0.06992152333259583, -0.029391944408416748, 0.2442723959684372, -0.1388823240995407, -0.01796630024909973, 0.018146967515349388, -0.05052938312292099, -0.12109128385782242, 0.08682256937026978, 0.00002932243478426244, -0.044570840895175934, 0.1371474713087082, -0.04007976874709129, -0.09086242318153381, -0.07316463440656662, -0.059289053082466125, 0.06086285412311554, 0.011658905074000359, -0.03775898367166519, -0.09093250334262848, -0.077287957072258, -0.07737166434526443, 0.030365053564310074, -0.0576900951564312, -0.038106609135866165, 0.01799297332763672, -0.005113762803375721, -0.015576052479445934, -0.017426706850528717, 0.09017972648143768, -0.05543581768870354, 0.03025556169450283, 0.005219347774982452, 0.02680548094213009, 0.0817168802022934, 0.04139110818505287, -0.11809108406305313, 0.07961104810237885, -0.13570088148117065, 0.08470310270786285, -0.11699379980564117, -0.0027246263343840837, -0.13145282864570618, -0.018282826989889145, -0.017780635505914688, 0.02197999507188797, -0.015626829117536545, 0.08458433300256729, -0.20624764263629913, 0.0011763833463191986, 0.14221960306167603, -0.10943090170621872, -0.10483911633491516, 0.08888904750347137, -0.043087687343358994, 0.05250230059027672, 0.04789923503994942, 0.11553752422332764, 0.12303321063518524, -0.056211117655038834, -0.10444624722003937, -0.07923433929681778, -0.028038673102855682, 0.1494743824005127, 0.06265263259410858, -0.06478343904018402, 
0.10766060650348663, 0.04600922018289566, 0.002686477731913328, -0.07997617870569229, -0.010600384324789047, -0.06781688332557678, -0.011808752082288265, -0.07424808293581009, -0.046374183148145676, -0.004167289938777685, -0.080819271504879, -0.014231469482183456, -0.08545073866844177, -0.0017308773240074515, 0.08950617164373398, -0.025646822527050972, 0.013154360465705395, -0.0668012946844101, 0.04299204424023628, 0.014129756018519402, 0.018787913024425507, -0.21238809823989868, -0.10797328501939774, 0.03228830173611641, -0.1729067713022232, 0.04124774411320686, 0.035897403955459595, 0.011409321799874306, 0.0523943193256855, -0.009295208379626274, 0.024999745190143585, 0.02412502095103264, -0.007527884561568499, -0.012487879022955894, -0.14435380697250366, -0.06097172945737839, -0.08194433897733688, 0.06202647462487221, -0.1461615115404129, -0.014463500119745731, 0.06748760491609573, 0.16203245520591736, 0.01585104689002037, -0.08261172473430634, 0.06767234206199646, 0.015507848002016544, -0.03840363025665283, -0.051308758556842804, 0.006404743064194918, -0.023942980915308, 0.0253526009619236, 0.02340538613498211, -0.20066887140274048, -0.12577447295188904, 0.07297231256961823, 0.1087525337934494, -0.0566173754632473, -0.08446555584669113, -0.07556097954511642, -0.06886004656553268, -0.0814947858452797, -0.05538121238350868, 0.06902502477169037, 0.08255394548177719, 0.04709545522928238, -0.0643782913684845, -0.059652913361787796, 0.018638581037521362, 0.05997796356678009, -0.06307748705148697, 0.10295124351978302, 0.0758935958147049, -0.07611946016550064, 0.0998162105679512, -0.022418862208724022, 0.10768412798643112, 0.06989734619855881, 0.036376822739839554, -0.09190879017114639, 0.005065718665719032, 0.04801108315587044, 0.04691420495510101, 0.07095653563737869, -0.03794669732451439, 0.04143590107560158, 0.07676959782838821, 0.0032909337896853685, 0.03576260805130005, -0.053518351167440414, 0.03211745619773865, 0.04038482904434204, 0.002571446355432272, 
0.029267573729157448, 0.009308191947638988, -0.00029417756013572216, 0.07208248227834702, 0.031279850751161575, 0.0930500328540802, -0.015387661755084991, -0.05654345452785492, -0.09876728057861328, 0.1412147432565689, -0.0802975669503212, -0.2760241627693176, -0.15295451879501343, -0.05233895033597946, -0.04954780265688896, -0.01927502639591694, 0.07076461613178253, -0.00589435501024127, -0.10072387009859085, -0.10658081620931625, 0.032630421221256256, 0.024595648050308228, -0.11793086677789688, -0.05796878784894943, 0.05878525972366333, 0.0137878293171525, -0.16182871162891388, 0.04186407849192619, 0.049038391560316086, -0.05801393836736679, -0.00036636675940826535, 0.09068232029676437, 0.12882374227046967, 0.07623829692602158, 0.061422817409038544, -0.03080929070711136, -0.010728506371378899, 0.1838022917509079, -0.10252782702445984, 0.030088821426033974, 0.10591010004281998, -0.06112899258732796, 0.07136692851781845, 0.1674691140651703, 0.017357291653752327, -0.09445668011903763, 0.055167242884635925, 0.09230265021324158, -0.06867285817861557, -0.2370721995830536, -0.11744958907365799, -0.02356107532978058, 0.01902456022799015, 0.11610260605812073, 0.06155795603990555, 0.01327226310968399, 0.012157939374446869, -0.11494892090559006, -0.01999780163168907, -0.05544879287481308, 0.07229769229888916, 0.05731411650776863, -0.005956819746643305, 0.04457387328147888, -0.03612314164638519, 0.022471589967608452, 0.11750742793083191, 0.034222595393657684, 0.1525179147720337, -0.04005128890275955, 0.17443019151687622, 0.09091325849294662, 0.09672220051288605, -0.033592481166124344, 0.04011215642094612, -0.006513644475489855, 0.06981469690799713, -0.013483770191669464, -0.10402718186378479, -0.052968356758356094, 0.10729006677865982, 0.014647175557911396, -0.06639758497476578, 0.019883597269654274, -0.06858883053064346, 0.035851411521434784, 0.20083196461200714, -0.018888071179389954, -0.14280778169631958, -0.06020130589604378, 0.054924070835113525, -0.02674904093146324, 
-0.075959712266922, -0.009657270275056362, 0.08115831762552261, -0.13936525583267212, 0.02873026765882969, -0.04181046411395073, 0.08192574232816696, -0.13368207216262817, -0.016189411282539368, -0.015597183257341385, 0.04092470929026604, 0.0009328301530331373, 0.11844126135110855, -0.14870469272136688, 0.10318487137556076, -0.00250951386988163, 0.010521410964429379, -0.10326659679412842, 0.040659334510564804, -0.04460075870156288, -0.04993978887796402, 0.1287613809108734, -0.013449739664793015, -0.11262821406126022, -0.060226500034332275, -0.11441860347986221, -0.002793449442833662, 0.0617215521633625, -0.10289692133665085, 0.10577772557735443, 0.03812602907419205, -0.02357422187924385, -0.02556454949080944, -0.01571153849363327, -0.11973726749420166, -0.22942589223384857, 0.10313652455806732, -0.10260285437107086, 0.06089498847723007, -0.06176593527197838, -0.04049233719706535, -0.035207320004701614, 0.15869580209255219, -0.09294059127569199, -0.05193851515650749, -0.11334705352783203, 0.027878528460860252, 0.17008548974990845, -0.04844165965914726, 0.0654086098074913, -0.034055981785058975, 0.17466014623641968, -0.009782273322343826, -0.0534665621817112, -0.0037168103735893965, -0.09658358246088028, -0.18531352281570435, -0.049290597438812256, 0.10991267114877701, 0.07689590007066727, 0.016304489225149155, -0.004168355371803045, 0.02504136599600315, 0.02641816809773445, -0.092446468770504, 0.03630887344479561, 0.13042497634887695, 0.11372220516204834, 0.03223033994436264, -0.01671006716787815, -0.08069825917482376, -0.10750746726989746, -0.09105624258518219, 0.06238476559519768, 0.15901221334934235, -0.07040619105100632, 0.17221276462078094, 0.14692525565624237, -0.09731971472501755, -0.18541032075881958, -0.06941203027963638, 0.02660069800913334, -0.022110333666205406, 0.12418832629919052, -0.19583424925804138, 0.07609493285417557, 0.06330393999814987, -0.02189747989177704, 0.10357189923524857, -0.24635234475135803, -0.13137663900852203, 0.02242734655737877, 
0.034693192690610886, -0.22658716142177582, -0.16755333542823792, -0.10435584932565689, -0.03256973624229431, -0.13836313784122467, 0.12466910481452942, 0.015119410119950771, 0.020718231797218323, -0.020802922546863556, 0.06814616173505783, 0.056950028985738754, -0.06756013631820679, 0.12985099852085114, -0.01074366457760334, 0.017366304993629456, -0.11201153695583344, -0.011464809067547321, 0.00009987204248318449, -0.04799894243478775, 0.07751970738172531, 0.02513355016708374, 0.056007299572229385, -0.08825182169675827, -0.03465782478451729, -0.05454513058066368, 0.04653560370206833, -0.06688431650400162, -0.054129645228385925, -0.06657040864229202, 0.09082294255495071, 0.09364704042673111, -0.0030348938889801502, 0.0222152192145586, -0.033653419464826584, 0.04304803907871246, 0.22772547602653503, 0.11878020316362381, 0.040374331176280975, -0.1207028403878212, -0.030765624716877937, -0.015624558553099632, -0.0007671915809623897, -0.1373012810945511, 0.040455546230077744, 0.09428980946540833, 0.041930872946977615, 0.06910499930381775, -0.020049327984452248, -0.18569417297840118, 0.006416431628167629, 0.08990257233381271, -0.11722878366708755, -0.19837747514247894, 0.03250052034854889, 0.1450394093990326, -0.1635352373123169, -0.06792362034320831, 0.08642560988664627, 0.0028034013230353594, -0.02718227729201317, -0.0006079417071305215, 0.07530657202005386, 0.050494156777858734, 0.10072379559278488, 0.020223647356033325, 0.0542648620903492, -0.06394392997026443, 0.09787638485431671, 0.15273776650428772, -0.12501421570777893, 0.017528969794511795, 0.041233912110328674, -0.06672139465808868, -0.06570392847061157, 0.0011665642959997058, -0.011669792234897614, 0.02529226429760456, -0.04634234681725502, 0.02168339677155018, -0.0032991350162774324, 0.04767248034477234, 0.12678998708724976, 0.004707919899374247, 0.03604327142238617, 0.024924706667661667, -0.0008165554609149694, -0.07195913046598434, 0.09956450760364532, 0.026316313073039055, 0.04751897230744362, 
-0.04962509125471115, 0.022288380190730095, 0.015023470856249332, -0.008997608907520771, 0.01615404523909092, -0.041504476219415665, -0.06760221719741821, 0.0006113070994615555, -0.15726840496063232, 0.047037798911333084, -0.07609568536281586, 0.005286522675305605, -0.006197672802954912, -0.021127741783857346, -0.011343383230268955, 0.006440338678658009, -0.06437192857265472, -0.053995516151189804, -0.04210308939218521, 0.1224651038646698, -0.1918218731880188, -0.01119962241500616, 0.08923103660345078, -0.06265521049499512, 0.07389960438013077, -0.012120503932237625, -0.019899511709809303, 0.021092811599373817, -0.06998803466558456, -0.008857288397848606, -0.024028293788433075, 0.061860341578722, 0.010998960584402084, -0.13631665706634521, -0.021411584690213203, -0.002033786615356803, -0.08493684232234955, -0.010485285893082619, 0.04607241973280907, -0.16127906739711761, 0.031217334792017937, 0.07606573402881622, -0.03974664956331253, -0.05247551575303078, 0.03251763433218002, 0.037723246961832047, 0.008573171682655811, 0.09707523137331009, -0.006580837536603212, 0.046733953058719635, -0.1554250568151474, -0.05123760551214218, 0.002496997592970729, 0.0012571342522278428, 0.03417045995593071, 0.027168234810233116, 0.03268976882100105, -0.0027652252465486526, 0.22237153351306915, -0.009787006303668022, 0.059044644236564636, 0.030386632308363914, -0.03479677066206932, -0.03446007892489433, 0.0378115214407444, 0.03054657205939293, 0.01682179607450962, 0.01919659413397312, 0.02988573908805847, -0.020720677450299263, -0.0598786287009716, -0.031306590884923935, 0.06491969525814056, 0.15394698083400726, 0.1562308967113495, -0.04970363527536392, 0.07232186943292618, -0.15841986238956451, -0.05029395967721939, 0.030835121870040894, -0.03555865213274956, 0.06571172177791595, -0.07015949487686157, 0.044693466275930405, 0.06436637789011002, -0.09680860489606857, 0.14502322673797607, -0.06575098633766174, -0.030747199431061745, -0.03531266003847122, -0.14523671567440033, 
-0.04399600252509117, -0.002723619109019637, 0.007040428463369608, -0.09671179205179214, 0.10197228938341141, 0.13888782262802124, -0.01190971676260233, -0.007196548394858837, 0.0806342288851738, -0.05051081255078316, -0.04395172372460365, -0.0236266627907753, -0.001445562345907092, 0.007716743741184473, 0.00696518225595355, 0.07400790601968765, 0.013568125665187836, 0.07498584687709808, 0.05951060727238655, 0.09769705682992935, 0.03542337566614151, 0.01953490637242794, -0.03284003213047981, -0.0702316015958786, -0.0030873098876327276, -0.013872895389795303, -0.050436198711395264, 0.18928368389606476, 0.0534723624587059, 0.01421182882040739, 0.00616931077092886, 0.20994190871715546, -0.00036711146822199225, -0.05633899196982384, -0.130017951130867, 0.12741251289844513, -0.005223180167376995, 0.014218411408364773, 0.02094431407749653, -0.12873050570487976, 0.024687593802809715, 0.1633068323135376, 0.12099408358335495, 0.04460646212100983, 0.009315014816820621, 0.030408533290028572, 0.02228623442351818, -0.01826777495443821, 0.05022729933261871, 0.03698907047510147, 0.2083253115415573, -0.059289999306201935, 0.04932577162981033, -0.012821443378925323, -0.0019238018430769444, -0.022946815937757492, 0.08834380656480789, -0.025950796902179718, 0.019145308062434196, -0.06684698909521103, 0.0980745479464531, -0.051392000168561935, -0.24388068914413452, -0.02586691826581955, -0.09234051406383514, -0.13027362525463104, -0.02591422200202942, 0.021556347608566284, -0.018778275698423386, 0.04261225834488869, 0.02775716967880726, -0.02919122762978077, 0.1961386799812317, 0.0010961319785565138, -0.08347301185131073, -0.06155066564679146, 0.0603443942964077, -0.019925782456994057, 0.27089041471481323, -0.008155007846653461, 0.06977412849664688, 0.09647193551063538, -0.022062210366129875, -0.14391887187957764, -0.0013466778909787536, 0.0993686318397522, -0.04995501786470413, 0.05440150201320648, 0.1681664139032364, -0.03002307377755642, 0.14691053330898285, 0.03975291550159454, 
-0.03630441427230835, 0.05736718326807022, 0.07151702046394348, 0.047840334475040436, -0.09264572709798813, 0.07777377218008041, -0.07611901313066483, 0.1420774608850479, 0.11365186423063278, -0.03296096250414848, -0.0035068069119006395, -0.056144025176763535, 0.06404613703489304, -0.0335230715572834, 0.13062667846679688, -0.011994810774922371, -0.16007545590400696, 0.033460795879364014, 0.01280267909169197, 0.04707905650138855, -0.2401847541332245, -0.06351470947265625, 0.12365341186523438, -0.045873597264289856, 0.017377041280269623, 0.09005951881408691, 0.041898030787706375, 0.007047398015856743, -0.07079944759607315, -0.07969837635755539, -0.007302961312234402, 0.12063272297382355, -0.09274543076753616, -0.04902302846312523 ]
7d48965bc1421f07f3a663f0894820d7765b6ead
# Dataset Card for Welfare-QA ## Description 대한민국 보건복지부에서 발간하였으며 2023년 5월 11일에 [복지로](https://www.bokjiro.go.kr/ssis-tbu/index.do)에 등록된 안내책자를 바탕으로 만들어졌습니다. 총 413페이지의 비정형 PDF에 담긴 약 460여개의 복지제도에 대한 Question-Answering-Documents 데이터셋입니다. 원본은 다음 링크에서 확인해보실 수 있습니다. [👉 '2023 나에게 힘이되는 복지서비스 PDF 책자'](https://www.bokjiro.go.kr/ssis-tbu/twatxa/wlfarePr/selectWlfareSubMain.do?dmMnuParam=column27) ## Project Repo - Github Repo : [Ask-for-Welfare](https://github.com/ssisOneTeam/Ask-for-Welfare) ## How to Uses ```python >>> from datasets import load_dataset >>> dataset = load_dataset("Ash-Hun/Welfare-QA", split='train') >>> dataset Dataset({ features: ['Question', 'Answer', 'Documents'], num_rows: 9547 }) ``` ```python >>> dataset[0] {'Question': 'LPG 사용 가정의 고무호스를 교체하려면 어떤 지원을 받을 수 있나요?', 'Answer': 'LPG용기 사용가구 시설개선 사업을 통해 LPG 고무호스를 금속배관으로 교체하는 데 필요한 지원을 받으실 수 있습니다.', 'Documents': 'LPG용기 사용가구 시설개선'} ``` <!--- ### Contributors <table align="center"> <tr> <td align="center"> <a href="https://github.com/PangPangGod"> <img src="https://github.com/PangPangGod.png" width="100px;" alt="송준호"/><br /> <sub><b>송준호</b></sub> </a> </td> <td align="center"> <a href="https://github.com/ash-hun"> <img src="https://github.com/ash-hun.png" width="100px;" alt="최재훈"/><br /> <sub><b>최재훈</b></sub> </a> </td> <td align="center"> <a href="https://github.com/MoonHeesun"> <img src="https://github.com/MoonHeesun.png" width="100px;" alt="문희선"/><br /> <sub><b>문희선</b></sub> </a> </td> <td align="center"> <a href="https://github.com/Noveled"> <img src="https://github.com/Noveled.png" width="100px;" alt="김민식"/><br /> <sub><b>김민식</b></sub> </a> </td> <td align="center"> <a href="https://github.com/myeongjun1007"> <img src="https://github.com/myeongjun1007.png" width="100px;" alt="현명준"/><br /> <sub><b>현명준</b></sub> </a> </td> <td align="center"> <a href="https://github.com/kha-jaejun"> <img src="https://github.com/kha-jaejun.png" width="100px;" alt="가재준"/><br /> <sub><b>가재준</b></sub> </a> </td> </tr> </table> --->
Ash-Hun/Welfare-QA
[ "task_categories:question-answering", "language:ko", "license:mit", "Ask-for-Welfare", "WelSSiSKo", "region:us" ]
2024-01-25T15:02:32+00:00
{"language": ["ko"], "license": "mit", "task_categories": ["question-answering"], "pretty_name": "AskWelfare-v1.0", "dataset_info": {"features": [{"name": "Question", "dtype": "string"}, {"name": "Answer", "dtype": "string"}, {"name": "Documents", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 3207687, "num_examples": 9547}]}, "tags": ["Ask-for-Welfare", "WelSSiSKo"]}
2024-01-25T15:55:16+00:00
[]
[ "ko" ]
TAGS #task_categories-question-answering #language-Korean #license-mit #Ask-for-Welfare #WelSSiSKo #region-us
# Dataset Card for Welfare-QA ## Description 대한민국 보건복지부에서 발간하였으며 2023년 5월 11일에 복지로에 등록된 안내책자를 바탕으로 만들어졌습니다. 총 413페이지의 비정형 PDF에 담긴 약 460여개의 복지제도에 대한 Question-Answering-Documents 데이터셋입니다. 원본은 다음 링크에서 확인해보실 수 있습니다. '2023 나에게 힘이되는 복지서비스 PDF 책자' ## Project Repo - Github Repo : Ask-for-Welfare ## How to Uses
[ "# Dataset Card for Welfare-QA", "## Description\n\n대한민국 보건복지부에서 발간하였으며 2023년 5월 11일에 복지로에 등록된 안내책자를 바탕으로 만들어졌습니다.\n총 413페이지의 비정형 PDF에 담긴 약 460여개의 복지제도에 대한 Question-Answering-Documents 데이터셋입니다.\n원본은 다음 링크에서 확인해보실 수 있습니다. '2023 나에게 힘이되는 복지서비스 PDF 책자'", "## Project Repo\n\n- Github Repo : Ask-for-Welfare", "## How to Uses" ]
[ "TAGS\n#task_categories-question-answering #language-Korean #license-mit #Ask-for-Welfare #WelSSiSKo #region-us \n", "# Dataset Card for Welfare-QA", "## Description\n\n대한민국 보건복지부에서 발간하였으며 2023년 5월 11일에 복지로에 등록된 안내책자를 바탕으로 만들어졌습니다.\n총 413페이지의 비정형 PDF에 담긴 약 460여개의 복지제도에 대한 Question-Answering-Documents 데이터셋입니다.\n원본은 다음 링크에서 확인해보실 수 있습니다. '2023 나에게 힘이되는 복지서비스 PDF 책자'", "## Project Repo\n\n- Github Repo : Ask-for-Welfare", "## How to Uses" ]
[ 44, 9, 90, 18, 5 ]
[ "passage: TAGS\n#task_categories-question-answering #language-Korean #license-mit #Ask-for-Welfare #WelSSiSKo #region-us \n# Dataset Card for Welfare-QA## Description\n\n대한민국 보건복지부에서 발간하였으며 2023년 5월 11일에 복지로에 등록된 안내책자를 바탕으로 만들어졌습니다.\n총 413페이지의 비정형 PDF에 담긴 약 460여개의 복지제도에 대한 Question-Answering-Documents 데이터셋입니다.\n원본은 다음 링크에서 확인해보실 수 있습니다. '2023 나에게 힘이되는 복지서비스 PDF 책자'## Project Repo\n\n- Github Repo : Ask-for-Welfare## How to Uses" ]
[ -0.042100563645362854, 0.08227407932281494, -0.007575283292680979, 0.02087913639843464, 0.08283043652772903, 0.048086848109960556, 0.05039210617542267, 0.12053432315587997, 0.2035321295261383, 0.06446176022291183, 0.14188691973686218, 0.07234322279691696, 0.09326089173555374, 0.04994938522577286, -0.00950340274721384, -0.20012100040912628, 0.03988586738705635, 0.012651260010898113, 0.07354552298784256, 0.09366012364625931, 0.05167040601372719, -0.024851545691490173, 0.08084316551685333, 0.010468211956322193, -0.07430614531040192, 0.0667000561952591, -0.1440323442220688, -0.01983584836125374, 0.07983861118555069, -0.05181247368454933, 0.06000349670648575, 0.09520278126001358, 0.0023427088744938374, -0.08125259727239609, 0.02522556483745575, -0.07643460482358932, -0.06962309777736664, 0.028482194989919662, -0.07539276778697968, -0.09581340104341507, 0.09544000029563904, -0.07023462653160095, -0.04961389675736427, -0.004219639115035534, -0.06630727648735046, -0.14183172583580017, -0.057464733719825745, 0.038476891815662384, 0.07914911210536957, 0.031220845878124237, -0.03193272650241852, 0.12770289182662964, -0.142894446849823, 0.011565802618861198, 0.12727560102939606, -0.21703284978866577, -0.006562334951013327, 0.12091851979494095, 0.0937308520078659, 0.0011593620292842388, -0.031481917947530746, 0.039789680391550064, 0.057321056723594666, 0.0114359762519598, -0.07203593850135803, -0.15350006520748138, -0.11036541312932968, 0.032061830163002014, -0.06765352934598923, 0.019730480387806892, 0.3021663427352905, 0.07003382593393326, -0.07077359408140182, -0.08355296403169632, 0.012247171252965927, 0.02794554829597473, 0.038730427622795105, 0.0010023327777162194, -0.013552386313676834, -0.03816039115190506, 0.07183524966239929, -0.06629573553800583, -0.05882320925593376, -0.12185528874397278, -0.10586201399564743, 0.09252624213695526, 0.028721434995532036, -0.006533368956297636, 0.025016220286488533, 0.03677770122885704, -0.03921202942728996, -0.06010434404015541, 
-0.12237914651632309, -0.04960570111870766, -0.06190827116370201, 0.036815207451581955, 0.055866967886686325, -0.05568038672208786, 0.1563849151134491, 0.11920340359210968, 0.11259003728628159, 0.02507791668176651, -0.13087742030620575, 0.09148529171943665, 0.2314184606075287, 0.051781825721263885, -0.055942144244909286, -0.03427698835730553, -0.07074571400880814, 0.023782875388860703, 0.009929639287292957, -0.03351617231965065, -0.11977674067020416, 0.025417229160666466, -0.050514012575149536, 0.15609657764434814, 0.037656866014003754, 0.04402441531419754, -0.039654556661844254, 0.002179914154112339, 0.19028377532958984, -0.02751588076353073, -0.06979748606681824, -0.009609893895685673, -0.08548147231340408, -0.04655354470014572, -0.07691534608602524, 0.09137279540300369, -0.013486051931977272, 0.051239002496004105, -0.00654590455815196, -0.04880806431174278, 0.01832367293536663, -0.022852057591080666, 0.03318193927407265, -0.14520910382270813, 0.09735505282878876, -0.10409224033355713, -0.24305987358093262, -0.062369413673877716, -0.032225750386714935, -0.07188605517148972, 0.033227384090423584, -0.02732851356267929, -0.006323714274913073, -0.09817369282245636, -0.0518181212246418, 0.12816207110881805, -0.020882900804281235, 0.0973273366689682, -0.09181144833564758, 0.04049471393227577, -0.061106499284505844, -0.023708563297986984, -0.10999946296215057, 0.0252644345164299, -0.049273423850536346, 0.013531285338103771, -0.13949726521968842, -0.017808973789215088, -0.06806501001119614, -0.014169543981552124, 0.013269513845443726, 0.06668967753648758, -0.06361067295074463, 0.15111933648586273, -0.023439934477210045, -0.0008957398240454495, 0.0026892295572906733, -0.09734314680099487, -0.20573833584785461, 0.1003490760922432, 0.015969686210155487, 0.23656406998634338, 0.005513358861207962, 0.36029693484306335, 0.047722939401865005, -0.14038468897342682, -0.07426510006189346, 0.07137100398540497, -0.15536928176879883, -0.07281287014484406, 0.11115983873605728, 
-0.015506132505834103, -0.03960609808564186, 0.01464088261127472, -0.12329977005720139, -0.04028213396668434, -0.008132952265441418, -0.04580727592110634, 0.041911881417036057, -0.096397764980793, 0.015802768990397453, 0.034044552594423294, 0.01083248108625412, -0.023215709254145622, 0.03867124393582344, -0.08551880717277527, 0.023898854851722717, -0.0034577392507344484, -0.07537199556827545, -0.13522763550281525, 0.08931883424520493, 0.06020770221948624, -0.006157797761261463, -0.039906855672597885, 0.019952185451984406, -0.004995121620595455, 0.04199148714542389, 0.044908422976732254, 0.13595281541347504, -0.003343872958794236, -0.06757769733667374, -0.04660521075129509, 0.003438975429162383, 0.03745051845908165, 0.03838466852903366, 0.009809299372136593, -0.02996111661195755, 0.011071119457483292, -0.0172231774777174, 0.0026066675782203674, 0.04421601817011833, -0.0061459019780159, 0.10929004102945328, 0.03816718980669975, -0.03103090636432171, 0.059798676520586014, 0.15227000415325165, 0.10478486865758896, 0.05150698497891426, 0.051390111446380615, 0.02651255577802658, 0.0038786116056144238, -0.04560987651348114, 0.10723497718572617, 0.02067621238529682, -0.008303267881274223, 0.05413633584976196, 0.05353270471096039, -0.044719867408275604, -0.038936663419008255, -0.022188857197761536, 0.012613952159881592, -0.12344435602426529, -0.04838418588042259, 0.07539109885692596, -0.043129608035087585, 0.04404661804437637, -0.0652260109782219, -0.02586115337908268, 0.02739070914685726, -0.0410069040954113, -0.021240264177322388, 0.10542162507772446, 0.11629655957221985, -0.22560033202171326, 0.07567533105611801, -0.014387265779078007, 0.055091492831707, 0.25615790486335754, -0.01743580587208271, 0.0008670445531606674, -0.05090697109699249, 0.10235331952571869, -0.056629329919815063, 0.19452711939811707, -0.1673317849636078, 0.0016109226271510124, 0.06505824625492096, 0.010453996248543262, 0.03314926102757454, -0.052094198763370514, -0.13732193410396576, 
-0.057085540145635605, -0.02556559629738331, -0.14616328477859497, 0.10088589787483215, 0.015843667089939117, 0.0300440676510334, 0.020039405673742294, 0.03198148310184479, 0.03296258673071861, -0.01057770848274231, -0.03863019496202469, 0.1053600013256073, -0.007718436419963837, -0.1761975735425949, 0.007738533895462751, -0.028416788205504417, 0.08958160877227783, -0.03109004534780979, 0.0644979327917099, -0.09500830620527267, -0.035726625472307205, 0.00711565837264061, 0.03872685879468918, -0.08987690508365631, -0.13700555264949799, -0.007732348516583443, 0.055207014083862305, -0.12848466634750366, -0.03173675015568733, -0.025927169248461723, -0.05751475691795349, -0.0019728399347513914, 0.14372284710407257, -0.16296382248401642, 0.11778292059898376, -0.05882296338677406, 0.06289291381835938, 0.03376782312989235, -0.08637690544128418, 0.1360461413860321, -0.138187974691391, 0.10401947051286697, 0.05487493798136711, 0.0058552538976073265, 0.0011936207301914692, 0.11937911063432693, 0.017155056819319725, -0.061693694442510605, 0.039175208657979965, 0.03710620850324631, -0.09961066395044327, -0.23903371393680573, -0.043186455965042114, 0.0018267459236085415, 0.19942010939121246, 0.005318815354257822, 0.051582060754299164, 0.05711983144283295, 0.16900895535945892, 0.044577181339263916, -0.030330605804920197, -0.14888949692249298, 0.023655183613300323, 0.04345140978693962, -0.018486807122826576, 0.04667770117521286, -0.09498995542526245, -0.018083395436406136, 0.09809952974319458, 0.06260334700345993, 0.1280653476715088, 0.09684748947620392, -0.03311733156442642, 0.09263758361339569, 0.21421930193901062, 0.02983490377664566, 0.0272268895059824, -0.019611410796642303, -0.02509581856429577, -0.07817365974187851, -0.006018121726810932, -0.0018304104451090097, 0.0482766255736351, 0.05022560432553291, -0.14900748431682587, 0.033164702355861664, -0.05493796989321709, 0.0523778535425663, 0.03480053320527077, 0.053617820143699646, 0.11784590035676956, 0.056536294519901276, 
0.0222206749022007, 0.01991654746234417, -0.03985811024904251, 0.07570157945156097, 0.0836593359708786, -0.19155281782150269, 0.06437347829341888, 0.006316801067441702, 0.09342924505472183, -0.05820917710661888, 0.019683463498950005, -0.12720786035060883, -0.16171938180923462, 0.00219159759581089, 0.10589075833559036, -0.11984092742204666, 0.23987619578838348, 0.043158452957868576, 0.009348196908831596, -0.18409426510334015, -0.02768286131322384, -0.02730974368751049, 0.04100028797984123, 0.24049289524555206, 0.010021918453276157, 0.055226754397153854, -0.0805554986000061, 0.03811371326446533, 0.08660118281841278, 0.057787708938121796, -0.12733975052833557, -0.0254006776958704, 0.06360417604446411, 0.03323087468743324, -0.11639758944511414, -0.07120925933122635, 0.014312430284917355, 0.013819198124110699, 0.03210058435797691, 0.03228430822491646, 0.13876654207706451, 0.04839683324098587, -0.0027325483970344067, -0.18747876584529877, 0.10272599011659622, -0.11657857149839401, -0.11199896037578583, -0.053788889199495316, -0.01315657701343298, 0.053680405020713806, -0.03287731111049652, -0.0856378898024559, 0.0040291729383170605, -0.14988233149051666, -0.004289855714887381, -0.05444687232375145, 0.06271903216838837, -0.05380207672715187, -0.09831122308969498, 0.006405774038285017, 0.07504767179489136, 0.03705783933401108, 0.017349321395158768, 0.013135436922311783, 0.03594660758972168, -0.06234480068087578, -0.16950015723705292, 0.13312041759490967, -0.1991184800863266, 0.1342620551586151, 0.14321286976337433, -0.020975477993488312, 0.004330106545239687, -0.03869600221514702, -0.11223910748958588, 0.09480608999729156, 0.1398438960313797, -0.039897702634334564, 0.007272644434124231, 0.19871258735656738, 0.014433077536523342, -0.20117375254631042, -0.06711627542972565, -0.024531101807951927, 0.010023602284491062, 0.03781372308731079, -0.22154344618320465, 0.009352970868349075, 0.2116403579711914, -0.05047580972313881, -0.07679983973503113, -0.13835889101028442, 
-0.0578012615442276, 0.08119570463895798, -0.031507931649684906, 0.10726592689752579, -0.10109667479991913, -0.06800618767738342, 0.13402718305587769, -0.23878084123134613, 0.12692898511886597, -0.13980455696582794, 0.033109523355960846, -0.023950835689902306, 0.12948258221149445, -0.013954125344753265, -0.012322225607931614, 0.19955453276634216, 0.02151983417570591, -0.0016197317745536566, -0.043271731585264206, -0.14131370186805725, 0.0558888204395771, 0.008505088277161121, 0.11830282211303711, -0.08681999891996384, 0.059280168265104294, -0.20328503847122192, -0.026310425251722336, -0.09266405552625656, 0.054984819144010544, 0.003341793781146407, -0.1261008232831955, -0.15940260887145996, 0.09101087599992752, -0.0808892697095871, 0.011999305337667465, 0.155459463596344, -0.12121632695198059, 0.008291413076221943, 0.09848769009113312, 0.15018483996391296, -0.024412523955106735, 0.06048145145177841, -0.06089375913143158, -0.09778621792793274, 0.0842779129743576, -0.11128793656826019, -0.04839220643043518, 0.09692277014255524, 0.048299212008714676, 0.08340559154748917, -0.058433420956134796, -0.10493168234825134, 0.1195061057806015, 0.0453287735581398, -0.0034592633601278067, -0.21430586278438568, 0.017229652032256126, -0.08662581443786621, 0.031676795333623886, 0.011033782735466957, 0.011356235481798649, 0.02509092539548874, -0.0151134692132473, 0.04291949048638344, -0.04882286116480827, -0.0024817727971822023, -0.013036216609179974, 0.1197296679019928, 0.03832881152629852, -0.09664025902748108, 0.02961752377450466, 0.026091646403074265, 0.006193848792463541, 0.04191063344478607, 0.043376535177230835, -0.10488083213567734, -0.07435715198516846, -0.14632174372673035, 0.08940167725086212, -0.0459124930202961, -0.09772714227437973, 0.0007876728777773678, -0.0240336861461401, -0.06148429214954376, 0.07999752461910248, 0.011530409567058086, -0.007339677307754755, 0.09555620700120926, 0.01401450578123331, -0.04669181630015373, -0.02256285585463047, -0.11898598819971085, 
-0.007190155331045389, -0.03802923113107681, -0.15784671902656555, 0.039107028394937515, 0.19087904691696167, -0.07379379868507385, -0.030239198356866837, -0.09471150487661362, 0.03437370806932449, -0.06767915934324265, 0.026530781760811806, -0.1476231813430786, -0.040800586342811584, -0.010806241072714329, -0.1492013931274414, -0.11783422529697418, -0.004427922889590263, -0.06372283399105072, 0.08373655378818512, 0.07594321668148041, 0.06143810227513313, -0.07282676547765732, -0.012742136605083942, 0.13668105006217957, 0.02545008435845375, 0.09411466866731644, 0.008860507979989052, -0.05095047876238823, 0.1098640188574791, 0.011075376532971859, 0.07831646502017975, 0.037430908530950546, 0.05563293397426605, 0.05902385339140892, 0.06193417310714722, -0.06347271800041199, 0.006001877598464489, 0.040758635848760605, 0.11171864718198776, 0.05907062068581581, -0.08593443781137466, 0.1143874004483223, 0.02026107907295227, -0.08777319639921188, -0.0766545906662941, 0.06257039308547974, 0.022720102220773697, -0.013708321377635002, 0.05796739459037781, 0.01801164448261261, 0.01911759190261364, -0.04448959231376648, -0.011535336263477802, 0.006599212531000376, -0.017335282638669014, 0.021216416731476784, -0.01737402193248272, -0.012143934145569801, -0.1039169430732727, 0.28469640016555786, 0.004892203025519848, -0.07254736870527267, 0.024122903123497963, 0.12833920121192932, 0.03877304494380951, -0.08018452674150467, 0.11660655587911606, 0.05559573322534561, 0.004169450607150793, -0.10638133436441422, -0.012726309709250927, -0.13414737582206726, -0.15821179747581482, 0.06927307695150375, 0.07395754009485245, 0.16458649933338165, -0.05233893170952797, -0.00419333390891552, -0.05252276360988617, 0.028246143832802773, -0.016447056084871292, -0.05555645748972893, 0.0004880107881035656, 0.01489714253693819, -0.003953351639211178, 0.13843637704849243, 0.0055563743226230145, 0.05338185280561447, 0.055684320628643036, -0.03879003971815109, -0.09075398743152618, 
-0.07032237946987152, -0.038488999009132385, -0.07401271164417267, 0.08374259620904922, -0.05572132021188736, 0.05457600951194763, 0.0999620109796524, 0.047751910984516144, 0.028647372499108315, 0.11143555492162704, -0.03689572215080261, -0.033683061599731445, 0.05555330961942673, -0.03494126722216606, 0.03941046819090843, -0.1449851393699646, 0.050187524408102036, 0.025449275970458984, 0.05241817235946655, 0.009946477599442005, 0.06514324992895126, -0.010991362854838371, -0.09588857740163803, -0.1868315041065216, -0.04273347184062004, -0.053033508360385895, 0.07307200878858566, -0.05708572641015053, 0.07409341633319855, -0.0017713146517053246, 0.03835364803671837, 0.023197542876005173, 0.19790658354759216, 0.04985905811190605, -0.03720896691083908, -0.13749779760837555, 0.03750864416360855, -0.07317926734685898, -0.008022650144994259, 0.025851109996438026, -0.06721197068691254, -0.005055817309767008, 0.26536479592323303, 0.2204395830631256, 0.03555413708090782, 0.028094235807657242, -0.00013796235725749284, 0.016245407983660698, 0.028479455038905144, -0.0009723045513965189, 0.005436010658740997, 0.06317798793315887, -0.04134462773799896, -0.006570052355527878, -0.06626570969820023, -0.022336149588227272, -0.031047837808728218, 0.02763984352350235, 0.07788512855768204, -0.1513698846101761, -0.09949744492769241, 0.14482146501541138, -0.14790445566177368, 0.0042357053607702255, 0.02368701621890068, -0.060030121356248856, -0.15072496235370636, -0.004680113401263952, 0.04767011106014252, 0.10002544522285461, -0.0066737644374370575, 0.011383593082427979, 0.0019482274074107409, -0.06811796873807907, 0.045381899923086166, -0.10981102287769318, -0.05165278539061546, 0.06453076750040054, 0.04126681759953499, 0.12940534949302673, 0.006666635628789663, 0.07056587189435959, 0.02231413871049881, -0.06363865733146667, -0.0807976946234703, 0.11362408846616745, 0.09892863035202026, -0.014359642751514912, -0.009764467366039753, 0.012078620493412018, 0.044021379202604294, 
0.010206072591245174, 0.0729583129286766, -0.012528520077466965, 0.0012161601334810257, 0.06491390615701675, 0.04860908165574074, -0.12449564039707184, 0.15623848140239716, -0.09028061479330063, 0.06991732865571976, 0.05806548148393631, 0.017515666782855988, 0.002257970394566655, -0.0009645083919167519, 0.08692241460084915, 0.004912243224680424, -0.034028489142656326, -0.051391005516052246, -0.023303909227252007, -0.023571159690618515, 0.10524312406778336, -0.024950234219431877, -0.14722605049610138, 0.07523228228092194, -0.06399456411600113, 0.020799437537789345, 0.03490000218153, -0.019014300778508186, 0.015336291864514351, 0.005195720586925745, 0.011123891919851303, -0.1180395558476448, 0.03235061839222908, -0.01173096988350153, -0.0746290311217308, -0.001213721581734717 ]
80ad81f74f95588572cf16c0d19d4a1ea41fa756
## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** @INPROCEEDINGS{8946435, author={Güven, Zekeriya Anıl and Diri, Banu and Çakaloğlu, Tolgahan}, booktitle={2019 Innovations in Intelligent Systems and Applications Conference (ASYU)}, title={Comparison Method for Emotion Detection of Twitter Users}, year={2019}, volume={}, number={}, pages={1-5}, keywords={Twitter;Resource management;Machine learning algorithms;Computer science;Media;Advertising;Topic Modelling;Latent Dirichlet Allocation;Natural Language Processing;Emotion Detection;Sentiment Analysis;Machine Learning}, doi={10.1109/ASYU48272.2019.8946435}} **APA:** Güven, Z. A., Diri, B., & Çakaloğlu, T. (2019, October). Comparison Method for Emotion Detection of Twitter Users. In 2019 Innovations in Intelligent Systems and Applications Conference (ASYU) (pp. 1-5). IEEE.
anilguven/turkish_tweet_emotion_dataset
[ "task_categories:text-classification", "size_categories:1K<n<10K", "language:tr", "license:unknown", "tweet", "turkish", "sentiment", "emotion", "region:us" ]
2024-01-25T15:12:58+00:00
{"language": ["tr"], "license": "unknown", "size_categories": ["1K<n<10K"], "task_categories": ["text-classification"], "tags": ["tweet", "turkish", "sentiment", "emotion"]}
2024-02-13T21:29:52+00:00
[]
[ "tr" ]
TAGS #task_categories-text-classification #size_categories-1K<n<10K #language-Turkish #license-unknown #tweet #turkish #sentiment #emotion #region-us
[optional] BibTeX: @INPROCEEDINGS{8946435, author={Güven, Zekeriya Anıl and Diri, Banu and Çakaloğlu, Tolgahan}, booktitle={2019 Innovations in Intelligent Systems and Applications Conference (ASYU)}, title={Comparison Method for Emotion Detection of Twitter Users}, year={2019}, volume={}, number={}, pages={1-5}, keywords={Twitter;Resource management;Machine learning algorithms;Computer science;Media;Advertising;Topic Modelling;Latent Dirichlet Allocation;Natural Language Processing;Emotion Detection;Sentiment Analysis;Machine Learning}, doi={10.1109/ASYU48272.2019.8946435}} APA: Güven, Z. A., Diri, B., & Çakaloğlu, T. (2019, October). Comparison Method for Emotion Detection of Twitter Users. In 2019 Innovations in Intelligent Systems and Applications Conference (ASYU) (pp. 1-5). IEEE.
[]
[ "TAGS\n#task_categories-text-classification #size_categories-1K<n<10K #language-Turkish #license-unknown #tweet #turkish #sentiment #emotion #region-us \n" ]
[ 53 ]
[ "passage: TAGS\n#task_categories-text-classification #size_categories-1K<n<10K #language-Turkish #license-unknown #tweet #turkish #sentiment #emotion #region-us \n" ]
[ 0.017398159950971603, -0.12365781515836716, -0.006966187618672848, 0.0052015092223882675, 0.13016384840011597, 0.10777255892753601, 0.1657816618680954, 0.08357974886894226, 0.17242999374866486, 0.011057986877858639, 0.08417686820030212, 0.00475701130926609, 0.043252717703580856, -0.010609683580696583, -0.055978480726480484, -0.29616209864616394, -0.021099278703331947, -0.045013826340436935, 0.16623276472091675, 0.10882420092821121, 0.10521350800991058, -0.004727204330265522, 0.08081541210412979, -0.06071474775671959, -0.08321525156497955, 0.09522725641727448, 0.009444612078368664, -0.06628292798995972, 0.09726560860872269, -0.02811018005013466, 0.05240800604224205, -0.01188189722597599, -0.07642693817615509, -0.19191989302635193, 0.04386955499649048, -0.019918005913496017, -0.03495503589510918, -0.0005419154767878354, 0.10493339598178864, -0.18256936967372894, 0.24407753348350525, -0.19030603766441345, -0.0648331344127655, 0.08122315257787704, -0.11266320943832397, -0.14697059988975525, -0.04269200563430786, 0.17600680887699127, 0.019295504316687584, 0.0479896180331707, -0.052828025072813034, 0.1184564083814621, -0.09232080727815628, 0.04350151866674423, 0.16024088859558105, -0.2037627398967743, -0.0370422899723053, -0.023474089801311493, -0.023716799914836884, 0.08820731192827225, -0.03114365227520466, 0.09109628200531006, 0.028431950137019157, -0.028935996815562248, -0.1764102280139923, -0.09406827390193939, -0.044983699917793274, -0.10465674102306366, 0.042957499623298645, -0.05460266396403313, 0.21512548625469208, 0.10922946780920029, 0.08167815953493118, -0.14198049902915955, -0.007117539178580046, -0.10649927705526352, -0.025085441768169403, 0.05884263291954994, -0.023587100207805634, 0.08351696282625198, 0.10801292210817337, 0.05586910992860794, -0.11832474172115326, 0.05604144558310509, -0.20513685047626495, 0.1377895623445511, -0.025845909491181374, 0.015935013070702553, -0.037185367196798325, 0.008966358378529549, -0.14539556205272675, 
-0.07135933637619019, 0.06886786967515945, -0.02239939011633396, -0.059280119836330414, 0.009061143733561039, 0.03464335575699806, -0.0204427819699049, 0.08476453274488449, -0.1063319593667984, -0.02097485214471817, 0.03556791692972183, 0.010855086147785187, 0.11978907138109207, 0.09260383248329163, 0.03575456887483597, 0.0322965607047081, -0.11079207062721252, -0.1378583461046219, -0.15911813080310822, 0.03456873819231987, 0.0016695307567715645, -0.08711376786231995, -0.028580373153090477, -0.0767744779586792, 0.13305585086345673, -0.021705344319343567, 0.11104445159435272, -0.033095091581344604, 0.053760822862386703, -0.08112186193466187, -0.01703477092087269, -0.03994306921958923, 0.007471771910786629, -0.054879359900951385, 0.1393531858921051, -0.18028011918067932, -0.04841885715723038, 0.017056234180927277, 0.08283279836177826, 0.018994756042957306, 0.06308357417583466, 0.02362196333706379, -0.019651120528578758, 0.12682709097862244, -0.1256713569164276, 0.03342713043093681, -0.12772753834724426, -0.12462159246206284, -0.01663292571902275, -0.008248456753790379, -0.08437006175518036, 0.04203024506568909, -0.010886706411838531, -0.0017365905223414302, 0.08388463407754898, -0.024598535150289536, -0.030253345146775246, -0.07326070219278336, 0.10099136829376221, -0.0105350436642766, 0.08542055636644363, -0.04897966608405113, -0.002577699488028884, -0.17918488383293152, 0.06178859993815422, -0.020640896633267403, 0.07927466183900833, -0.09144999086856842, 0.2960125505924225, -0.04711895063519478, 0.006481410935521126, -0.06571768969297409, -0.033964574337005615, -0.16407525539398193, 0.13898926973342896, -0.21447467803955078, -0.08743482083082199, 0.06490641087293625, -0.008597790263593197, -0.07161155343055725, 0.09813578426837921, 0.05933976545929909, 0.16785144805908203, 0.13748252391815186, 0.43677040934562683, -0.0413287915289402, 0.053155217319726944, -0.15833517909049988, 0.17623873054981232, -0.11543837934732437, 0.06073734909296036, 0.08052220195531845, 
-0.016858359798789024, 0.09025200456380844, 0.04431074485182762, 0.1942867934703827, 0.11801958829164505, -0.024765996262431145, -0.0762651115655899, 0.024359649047255516, 0.0036298204213380814, 0.11950617283582687, 0.033408842980861664, 0.08330781757831573, -0.20494326949119568, -0.04346514865756035, -0.15800781548023224, 0.05975427106022835, 0.05409093201160431, -0.04451976343989372, -0.036417290568351746, 0.05466341972351074, 0.06534109264612198, 0.01300786342471838, -0.07358788698911667, 0.042802926152944565, -0.07887072116136551, 0.13233919441699982, 0.15610630810260773, 0.11819077283143997, 0.04286462441086769, -0.17150793969631195, -0.0621039979159832, 0.07693783193826675, 0.07775972783565521, 0.04029469192028046, -0.04921165853738785, -0.18920893967151642, 0.15408016741275787, 0.013552848249673843, 0.06458347290754318, -0.01203338522464037, 0.01047283224761486, 0.21287690103054047, 0.05461627244949341, -0.030014222487807274, 0.08908891677856445, -0.08350012451410294, 0.038763463497161865, -0.004473037552088499, -0.07410141825675964, 0.06925857067108154, -0.10321607440710068, -0.0956343486905098, 0.13560913503170013, -0.1662244200706482, 0.06084885075688362, 0.18142390251159668, -0.031221570447087288, -0.0652606263756752, 0.025061339139938354, -0.04699268564581871, 0.03480174019932747, 0.11161798238754272, 0.03599189966917038, 0.022255025804042816, -0.015837691724300385, 0.03521540388464928, -0.008800285868346691, -0.083943210542202, 0.0181862972676754, -0.02697477675974369, -0.08116738498210907, 0.1218629702925682, -0.010666870512068272, -0.29252558946609497, 0.20685674250125885, 0.2648673951625824, 0.13378676772117615, 0.28859761357307434, -0.0051854876801371574, 0.04937209188938141, -0.039454370737075806, -0.012703395448625088, -0.0678599625825882, 0.11895167827606201, -0.22764550149440765, -0.02168702706694603, 0.02561929076910019, 0.01526329293847084, 0.01665765419602394, -0.029040880501270294, -0.10165411978960037, -0.033296678215265274, 
-0.049026090651750565, -0.07572543621063232, 0.09840275347232819, -0.004272430203855038, 0.11716542392969131, 0.0029926598072052, 0.05299832671880722, 0.037684760987758636, 0.011356447823345661, -0.09689433127641678, 0.030791709199547768, -0.16214154660701752, -0.27690061926841736, -0.030405789613723755, 0.023843618109822273, -0.025772759690880775, -0.012501120567321777, 0.05536077171564102, -0.21999405324459076, 0.05766287073493004, 0.014635700732469559, 0.1497182548046112, -0.08678459376096725, -0.061973799020051956, -0.07504698634147644, 0.06808368116617203, -0.1162344440817833, -0.043845828622579575, -0.06148733198642731, -0.057049334049224854, -0.026643289253115654, 0.04609468951821327, -0.09686285257339478, 0.011651434004306793, 0.2201596349477768, -0.04980260878801346, 0.012471386231482029, -0.1525139957666397, 0.056039098650217056, -0.23698647320270538, 0.034155458211898804, -0.050996940582990646, 0.06971541792154312, 0.03893355652689934, 0.26161786913871765, 0.02314601093530655, -0.09648934751749039, -0.02018258161842823, 0.09476999938488007, -0.04928898066282272, -0.2300662100315094, -0.10984191298484802, -0.06509286165237427, 0.2404872626066208, -0.08342523127794266, 0.0714188739657402, 0.11329670250415802, 0.050074826925992966, -0.08029027283191681, -0.09570103138685226, -0.06519827991724014, 0.0133631881326437, 0.24986393749713898, -0.04310847818851471, -0.032938044518232346, -0.15476052463054657, -0.027431849390268326, 0.20116005837917328, 0.02141452394425869, 0.04289188235998154, 0.11957957595586777, 0.19321119785308838, 0.008561762049794197, 0.09074708074331284, 0.0011230369564145803, -0.02280968800187111, 0.01012119185179472, -0.02950078807771206, -0.058572735637426376, -0.009552447125315666, 0.03376409411430359, 0.0067824688740074635, 0.02252456173300743, -0.0328679159283638, -0.05190378800034523, -0.06699564307928085, 0.23888905346393585, 0.05994892120361328, 0.04741983488202095, -0.00789585430175066, 0.005884114652872086, 0.04825688153505325, 
-0.08128803968429565, 0.005500664934515953, 0.01883154921233654, 0.11042606830596924, -0.11878269910812378, 0.17487947642803192, 0.011289572343230247, 0.10196863859891891, -0.015969589352607727, 0.06775761395692825, -0.17251677811145782, -0.18271106481552124, -0.006520511116832495, 0.1837729811668396, -0.1190272644162178, 0.28551289439201355, 0.0692775622010231, -0.017429841682314873, -0.15229132771492004, -0.09702595323324203, 0.08868660777807236, 0.12792915105819702, 0.038500964641571045, 0.06879165768623352, 0.03920862823724747, -0.11360326409339905, -0.09348195046186447, -0.00520052807405591, 0.1231335997581482, -0.04302430525422096, -0.08385857939720154, 0.02652137354016304, 0.03410419821739197, -0.020405473187565804, -0.0014462544349953532, -0.10731718689203262, -0.10303685069084167, 0.0435531884431839, 0.12203215807676315, -0.08500523120164871, 0.07344628125429153, -0.007340168580412865, -0.022686151787638664, 0.12111064791679382, -0.08423203229904175, -0.03422165662050247, -0.07291615009307861, -0.070319764316082, -0.025727147236466408, -0.03458461910486221, -0.11778546124696732, -0.01763634942471981, -0.116116464138031, -0.10170984268188477, -0.1145251989364624, 0.11308856308460236, -0.01287831086665392, -0.10384378582239151, -0.050862740725278854, 0.1308545619249344, -0.012355415150523186, 0.12303009629249573, 0.0037234153132885695, -0.05045824870467186, -0.0146882813423872, -0.12255585938692093, 0.06876764446496964, -0.10622834414243698, -0.11457359045743942, 0.0942961722612381, 0.11658251285552979, -0.09178674966096878, -0.10364911705255508, -0.06025597080588341, 0.14626315236091614, 0.29419681429862976, -0.05026663467288017, 0.15901686251163483, 0.042800698429346085, -0.02189204841852188, -0.19180652499198914, -0.00823918730020523, -0.12607765197753906, -0.012646581046283245, 0.09701740741729736, -0.04449379816651344, 0.009964101016521454, 0.07203497737646103, -0.02103528380393982, 0.10190169513225555, -0.3199063241481781, -0.0428473986685276, 
0.08696726709604263, -0.0784531906247139, 0.26901307702064514, -0.16030077636241913, -0.04316548630595207, -0.11868955940008163, 0.06630919128656387, 0.09433411061763763, -0.1508598029613495, 0.05461917072534561, -0.012525016441941261, 0.2438328117132187, 0.012750621885061264, 0.08334074169397354, 0.21948598325252533, 0.07411078363656998, 0.07293722778558731, -0.1476772576570511, -0.1865588128566742, 0.19511888921260834, 0.01531646866351366, -0.027104541659355164, -0.1876303255558014, -0.043872833251953125, -0.19176141917705536, -0.05034872516989708, -0.13004785776138306, 0.10693936794996262, 0.0022120277862995863, -0.041646476835012436, -0.1602642983198166, 0.0057907141745090485, -0.05580201372504234, -0.030358320102095604, 0.038968704640865326, -0.04375452175736427, -0.06790166348218918, -0.07838289439678192, 0.1460295170545578, -0.01370272971689701, 0.045055050402879715, -0.04174889251589775, -0.03484911844134331, 0.02900042198598385, -0.16494722664356232, -0.08815415948629379, 0.09833347052335739, -0.02726803347468376, 0.053269270807504654, -0.012972285971045494, -0.1281026303768158, 0.10507399588823318, 0.15260323882102966, 0.003948890138417482, -0.21723049879074097, -0.05631288141012192, -0.09893175959587097, 0.08129359036684036, -0.10938771814107895, -0.010665416717529297, 0.011278748512268066, 0.03068552538752556, 0.015513134188950062, -0.07121104747056961, -0.05136633291840553, 0.028969280421733856, -0.018862875178456306, -0.04639912396669388, -0.08645123988389969, 0.10938417911529541, 0.06225503981113434, -0.19735831022262573, 0.020775508135557175, 0.19165663421154022, -0.08823256194591522, -0.03242719918489456, -0.001571530825458467, 0.12776045501232147, 0.047539085149765015, -0.0517013855278492, 0.004528982564806938, -0.14767035841941833, -0.0060975090600550175, 0.15912844240665436, -0.013141850009560585, 0.06303127855062485, -0.033746302127838135, -0.009882896207273006, 0.10115113854408264, 0.022370437160134315, 0.043915171176195145, 
-0.04706623777747154, -0.12407652288675308, -0.016448955982923508, -0.0063664717599749565, 0.08761610835790634, -0.031068887561559677, -0.02584664151072502, -0.032791052013635635, -0.04429994150996208, -0.1293487399816513, -0.03079119138419628, -0.11127272248268127, -0.0120778139680624, 0.08377162367105484, 0.03563553839921951, -0.0015566004440188408, -0.17120274901390076, -0.09391757845878601, 0.05409364774823189, 0.0629255399107933, 0.17050060629844666, -0.08758744597434998, 0.006696891970932484, 0.04730033129453659, 0.02164551429450512, 0.1835031509399414, 0.14446629583835602, -0.03028380312025547, 0.13770435750484467, -0.24278727173805237, -0.008999002166092396, 0.12235064059495926, -0.02966066263616085, 0.016164565458893776, 0.1966656595468521, -0.054361000657081604, -0.0005825439002364874, 0.11197305470705032, 0.12321557849645615, -0.08745358139276505, -0.07936365902423859, 0.07408939301967621, 0.03733079507946968, -0.12893202900886536, -0.011227075010538101, -0.012300225906074047, 0.000895290169864893, -0.087419793009758, 0.09686329960823059, -0.025516953319311142, 0.02749079093337059, -0.029481956735253334, 0.00849226862192154, -0.0034446369390934706, -0.09499223530292511, -0.052789218723773956, -0.08998869359493256, 0.044385965913534164, -0.017365654930472374, 0.2216791808605194, 0.17761467397212982, -0.09042495489120483, 0.03415585681796074, 0.135173037648201, -0.07369592040777206, -0.07044259458780289, 0.16980932652950287, 0.11392328143119812, -0.06063543260097504, -0.13644221425056458, -0.01892513409256935, 0.00491249980404973, -0.09873558580875397, 0.168105348944664, 0.09789353609085083, 0.11412929743528366, 0.005007277242839336, 0.004335571080446243, -0.01606898009777069, 0.0678504928946495, -0.04695241153240204, 0.0033522718586027622, 0.04015097767114639, -0.003407274605706334, 0.0883537009358406, 0.20725540816783905, -0.004607534036040306, 0.031912077218294144, -0.04650438576936722, -0.06470412760972977, -0.0054633356630802155, -0.1870754361152649, 
0.01017508003860712, -0.10791673511266708, 0.019750891253352165, -0.06390492618083954, 0.03504745662212372, 0.0054527102038264275, 0.07829160988330841, -0.08570341765880585, 0.11645437777042389, -0.1492246389389038, -0.09591501951217651, 0.17507421970367432, -0.011597269214689732, 0.04794841632246971, -0.15671512484550476, -0.072654128074646, -0.08923245966434479, -0.0434567965567112, 0.03190504387021065, 0.08769562840461731, -0.13055750727653503, -0.08238688111305237, -0.21569982171058655, -0.08171073347330093, 0.0023810125421732664, 0.06890152394771576, 0.030310405418276787, 0.099310964345932, 0.023560384288430214, 0.006051009986549616, -0.0337604396045208, 0.10421518981456757, 0.05073492228984833, -0.07326751947402954, 0.09249651432037354, -0.04411530867218971, -0.1240914985537529, -0.005072480067610741, -0.05393553897738457, -0.05657564848661423, -0.03985069692134857, 0.1421281397342682, 0.3351793885231018, 0.03730513155460358, 0.040116842836141586, -0.12162374705076218, 0.06976012140512466, -0.020093226805329323, 0.0460829995572567, -0.04784868657588959, 0.08247403055429459, -0.03648039698600769, 0.11123541742563248, 0.003606430720537901, 0.017474107444286346, -0.09539377689361572, 0.0018556693103164434, 0.11733667552471161, -0.061501093208789825, 0.0009621413191780448, 0.214571014046669, -0.17938697338104248, 0.07654477655887604, 0.1548205465078354, -0.10795818269252777, -0.056399475783109665, -0.0024040089920163155, 0.1268359273672104, 0.1567411869764328, 0.08193014562129974, -0.005197214428335428, -0.12396648526191711, -0.05246103182435036, 0.02814353071153164, -0.3572153151035309, -0.09079553186893463, 0.0950532779097557, -0.0732199028134346, 0.11399929225444794, -0.10220859199762344, 0.024146893993020058, 0.07992000132799149, -0.01861151121556759, 0.034855011850595474, 0.06486312299966812, 0.09604781121015549, 0.13146445155143738, -0.013425965793430805, 0.048706844449043274, 0.029420973733067513, -0.11526963114738464, 0.1172492578625679, 
-0.1468210518360138, 0.007102258503437042, 0.04354609549045563, 0.027479609474539757, -0.11343857645988464, 0.15519212186336517, -0.04767230153083801, -0.015246069058775902, 0.09831508994102478, 0.020242569968104362, 0.024990256875753403, -0.03289783373475075, -0.0724228248000145, 0.0750422403216362, -0.11918734014034271, -0.03665538877248764, 0.03240475803613663, -0.1270979344844818, 0.14575695991516113, 0.019008805975317955, -0.15370799601078033, 0.014436058700084686, -0.06379807740449905, 0.05624331161379814, -0.06824839115142822, 0.04382333531975746, 0.06723164767026901, -0.028868163004517555, 0.018161233514547348, -0.31317034363746643, 0.06216549128293991, 0.1304154098033905, -0.049842700362205505, 0.004410917405039072 ]
4ae7884c0615c2af2c4ec1ec81e71c5425b0121e
# DAPR: Document-Aware Passage Retrieval This datasets repo contains the queries, passages/documents and judgements for the data used in the [DAPR](https://arxiv.org/abs/2305.13915) paper. ## Overview For the DAPR benchmark, it contains 5 datasets: | Dataset | #Queries (test) | #Documents | #Passages | --- | --- | --- | --- | | [MS MARCO](https://microsoft.github.io/msmarco/) | 2,722 | 1,359,163 | 2,383,023* | | [Natural Questions](https://ai.google.com/research/NaturalQuestions) | 3,610 | 108,626 | 2,682,017| | [MIRACL](https://project-miracl.github.io/) | 799 | 5,758,285 |32,893,221| | [Genomics](https://dmice.ohsu.edu/trec-gen/) | 62 | 162,259 |12,641,127| | [ConditionalQA](https://haitian-sun.github.io/conditionalqa/) | 271 | 652 |69,199| And additionally, NQ-hard, the hard subset of queries from Natural Questions is also included (516 in total). These queries are hard because understanding the document context (e.g. coreference, main topic, multi-hop reasoning, and acronym) is necessary for retrieving the relevant passages. > Notes: for MS MARCO, its documents do not provide the gold paragraph segmentation and we only segment the document by keeping the judged passages (from the MS MARCO Passage Ranking task) standing out while leaving the rest parts surrounding these passages. These passages are marked by `is_candidate==true`. > For Natural Questions, the training split is not provided because the duplicate timestamps are not compatible with the queries/qrels/corpus format. Please refer to https://public.ukp.informatik.tu-darmstadt.de/kwang/dapr/data/NaturalQuestions/ for the training split. 
## Load the dataset ### Loading the passages One can load the passages like this: ```python from datasets import load_dataset dataset_name = "ConditionalQA" passages = load_dataset("UKPLab/dapr", f"{dataset_name}-corpus", split="test") for passage in passages: passage["_id"] # passage id passage["text"] # passage text passage["title"] # doc title passage["doc_id"] passage["paragraph_no"] # the paragraph number within the document passage["total_paragraphs"] # how many paragraphs/passages in total in the document passage["is_candidate"] # is this passage a candidate for retrieval ``` Or stream the dataset without downloading it beforehand: ```python from datasets import load_dataset dataset_name = "ConditionalQA" passages = load_dataset( "UKPLab/dapr", f"{dataset_name}-corpus", split="test", streaming=True ) for passage in passages: passage["_id"] # passage id passage["text"] # passage text passage["title"] # doc title passage["doc_id"] passage["paragraph_no"] # the paragraph number within the document passage["total_paragraphs"] # how many paragraphs/passages in total in the document passage["is_candidate"] # is this passage a candidate for retrieval ``` ### Loading the qrels The qrels split contains the query relevance annotation, i.e., it contains the relevance score for (query, passage) pairs. 
```python from datasets import load_dataset dataset_name = "ConditionalQA" qrels = load_dataset("UKPLab/dapr", f"{dataset_name}-qrels", split="test") for qrel in qrels: qrel["query_id"] # query id (the text is available in ConditionalQA-queries) qrel["corpus_id"] # passage id qrel["score"] # gold judgement ``` We present the NQ-hard dataset in an extended format of the normal qrels with additional columns: ```python from datasets import load_dataset qrels = load_dataset("UKPLab/dapr", "nq-hard", split="test") for qrel in qrels: qrel["query_id"] # query id (the text is available in ConditionalQA-queries) qrel["corpus_id"] # passage id qrel["score"] # gold judgement # Additional columns: qrel["query"] # query text qrel["text"] # passage text qrel["title"] # doc title qrel["doc_id"] qrel["categories"] # list of categories about this query-passage pair qrel["url"] # url to the document in Wikipedia ``` ## Retrieval and Evaluation The following shows an example of how the dataset can be used to build a semantic search application. > This example is based on [clddp](https://github.com/kwang2049/clddp/tree/main) (`pip install -U clddp`). One can further explore this [example](https://github.com/kwang2049/clddp/blob/main/examples/search_fiqa.sh) for convenient multi-GPU exact search. 
```python # Please install clddp with `pip install -U clddp` from clddp.retriever import Retriever, RetrieverConfig, Pooling, SimilarityFunction from clddp.dm import Separator from typing import Dict from clddp.dm import Query, Passage import torch import pytrec_eval import numpy as np from datasets import load_dataset # Define the retriever (DRAGON+ from https://arxiv.org/abs/2302.07452) class DRAGONPlus(Retriever): def __init__(self) -> None: config = RetrieverConfig( query_model_name_or_path="facebook/dragon-plus-query-encoder", passage_model_name_or_path="facebook/dragon-plus-context-encoder", shared_encoder=False, sep=Separator.blank, pooling=Pooling.cls, similarity_function=SimilarityFunction.dot_product, query_max_length=512, passage_max_length=512, ) super().__init__(config) # Load data: passages = load_dataset("UKPLab/dapr", "ConditionalQA-corpus", split="test") queries = load_dataset("UKPLab/dapr", "ConditionalQA-queries", split="test") qrels_rows = load_dataset("UKPLab/dapr", "ConditionalQA-qrels", split="test") qrels: Dict[str, Dict[str, float]] = {} for qrel_row in qrels_rows: qid = qrel_row["query_id"] pid = qrel_row["corpus_id"] rel = qrel_row["score"] qrels.setdefault(qid, {}) qrels[qid][pid] = rel # Encode queries and passages: (refer to https://github.com/kwang2049/clddp/blob/main/examples/search_fiqa.sh for multi-GPU exact search) retriever = DRAGONPlus() retriever.eval() queries = [Query(query_id=query["_id"], text=query["text"]) for query in queries] passages = [ Passage(passage_id=passage["_id"], text=passage["text"]) for passage in passages ] query_embeddings = retriever.encode_queries(queries) with torch.no_grad(): # Takes around a minute on a V100 GPU passage_embeddings, passage_mask = retriever.encode_passages(passages) # Calculate the similarities and keep top-K: similarity_scores = torch.matmul( query_embeddings, passage_embeddings.t() ) # (query_num, passage_num) topk = torch.topk(similarity_scores, k=10) topk_values: torch.Tensor = 
topk[0] topk_indices: torch.LongTensor = topk[1] topk_value_lists = topk_values.tolist() topk_index_lists = topk_indices.tolist() # Run evaluation with pytrec_eval: retrieval_scores: Dict[str, Dict[str, float]] = {} for query_i, (values, indices) in enumerate(zip(topk_value_lists, topk_index_lists)): query_id = queries[query_i].query_id retrieval_scores.setdefault(query_id, {}) for value, passage_i in zip(values, indices): passage_id = passages[passage_i].passage_id retrieval_scores[query_id][passage_id] = value evaluator = pytrec_eval.RelevanceEvaluator( query_relevance=qrels, measures=["ndcg_cut_10"] ) query_performances: Dict[str, Dict[str, float]] = evaluator.evaluate(retrieval_scores) ndcg = np.mean([score["ndcg_cut_10"] for score in query_performances.values()]) print(ndcg) # 0.21796083196880855 ``` ## Note This dataset was created with `datasets==2.15.0`. Make sure to use this or a newer version of the datasets library. ## Citation If you use the code/data, feel free to cite our publication [DAPR: A Benchmark on Document-Aware Passage Retrieval](https://arxiv.org/abs/2305.13915): ```bibtex @article{wang2023dapr, title = "DAPR: A Benchmark on Document-Aware Passage Retrieval", author = "Kexin Wang and Nils Reimers and Iryna Gurevych", journal= "arXiv preprint arXiv:2305.13915", year = "2023", url = "https://arxiv.org/abs/2305.13915", } ```
UKPLab/dapr
[ "arxiv:2305.13915", "arxiv:2302.07452", "region:us" ]
2024-01-25T15:17:57+00:00
{"configs": [{"config_name": "ConditionalQA-corpus", "data_files": [{"split": "test", "path": "ConditionalQA/corpus/*"}]}, {"config_name": "ConditionalQA-docs", "data_files": [{"split": "test", "path": "ConditionalQA/docs/*"}]}, {"config_name": "ConditionalQA-corpus_coref", "data_files": [{"split": "test", "path": "ConditionalQA/corpus_coref/*"}]}, {"config_name": "ConditionalQA-queries", "data_files": [{"split": "train", "path": "ConditionalQA/queries/train.parquet"}, {"split": "dev", "path": "ConditionalQA/queries/dev.parquet"}, {"split": "test", "path": "ConditionalQA/queries/test.parquet"}]}, {"config_name": "ConditionalQA-qrels", "data_files": [{"split": "train", "path": "ConditionalQA/qrels/train.parquet"}, {"split": "dev", "path": "ConditionalQA/qrels/dev.parquet"}, {"split": "test", "path": "ConditionalQA/qrels/test.parquet"}]}, {"config_name": "ConditionalQA-keyphrases", "data_files": [{"split": "test", "path": "ConditionalQA/keyphrases/*"}]}, {"config_name": "NaturalQuestions-corpus", "data_files": [{"split": "test", "path": "NaturalQuestions/corpus/*"}]}, {"config_name": "NaturalQuestions-docs", "data_files": [{"split": "test", "path": "NaturalQuestions/docs/*"}]}, {"config_name": "NaturalQuestions-corpus_coref", "data_files": [{"split": "test", "path": "NaturalQuestions/corpus_coref/*"}]}, {"config_name": "nq-hard", "data_files": [{"split": "test", "path": "NaturalQuestions/nq-hard/*"}]}, {"config_name": "NaturalQuestions-queries", "data_files": [{"split": "dev", "path": "NaturalQuestions/queries/dev.parquet"}, {"split": "test", "path": "NaturalQuestions/queries/test.parquet"}]}, {"config_name": "NaturalQuestions-qrels", "data_files": [{"split": "dev", "path": "NaturalQuestions/qrels/dev.parquet"}, {"split": "test", "path": "NaturalQuestions/qrels/test.parquet"}]}, {"config_name": "NaturalQuestions-keyphrases", "data_files": [{"split": "test", "path": "NaturalQuestions/keyphrases/*"}]}, {"config_name": "Genomics-corpus", "data_files": [{"split": "test", 
"path": "Genomics/corpus/*"}]}, {"config_name": "Genomics-docs", "data_files": [{"split": "test", "path": "Genomics/docs/*"}]}, {"config_name": "Genomics-corpus_coref", "data_files": [{"split": "test", "path": "Genomics/corpus_coref/*"}]}, {"config_name": "Genomics-queries", "data_files": [{"split": "test", "path": "Genomics/queries/test.parquet"}]}, {"config_name": "Genomics-qrels", "data_files": [{"split": "test", "path": "Genomics/qrels/test.parquet"}]}, {"config_name": "Genomics-keyphrases", "data_files": [{"split": "test", "path": "Genomics/keyphrases/*"}]}, {"config_name": "MSMARCO-corpus", "data_files": [{"split": "test", "path": "MSMARCO/corpus/*"}]}, {"config_name": "MSMARCO-docs", "data_files": [{"split": "test", "path": "MSMARCO/docs/*"}]}, {"config_name": "MSMARCO-corpus_coref", "data_files": [{"split": "test", "path": "MSMARCO/corpus_coref/*"}]}, {"config_name": "MSMARCO-queries", "data_files": [{"split": "train", "path": "MSMARCO/queries/train.parquet"}, {"split": "dev", "path": "MSMARCO/queries/dev.parquet"}, {"split": "test", "path": "MSMARCO/queries/test.parquet"}]}, {"config_name": "MSMARCO-qrels", "data_files": [{"split": "train", "path": "MSMARCO/qrels/train.parquet"}, {"split": "dev", "path": "MSMARCO/qrels/dev.parquet"}, {"split": "test", "path": "MSMARCO/qrels/test.parquet"}]}, {"config_name": "MSMARCO-keyphrases", "data_files": [{"split": "test", "path": "MSMARCO/keyphrases/*"}]}, {"config_name": "MIRACL-corpus", "data_files": [{"split": "test", "path": "MIRACL/corpus/*"}]}, {"config_name": "MIRACL-docs", "data_files": [{"split": "test", "path": "MIRACL/docs/*"}]}, {"config_name": "MIRACL-corpus_coref", "data_files": [{"split": "test", "path": "MIRACL/corpus_coref/*"}]}, {"config_name": "MIRACL-queries", "data_files": [{"split": "train", "path": "MIRACL/queries/train.parquet"}, {"split": "dev", "path": "MIRACL/queries/dev.parquet"}, {"split": "test", "path": "MIRACL/queries/test.parquet"}]}, {"config_name": "MIRACL-qrels", "data_files": 
[{"split": "train", "path": "MIRACL/qrels/train.parquet"}, {"split": "dev", "path": "MIRACL/qrels/dev.parquet"}, {"split": "test", "path": "MIRACL/qrels/test.parquet"}]}, {"config_name": "MIRACL-keyphrases", "data_files": [{"split": "test", "path": "MIRACL/keyphrases/*"}]}]}
2024-02-09T09:45:07+00:00
[ "2305.13915", "2302.07452" ]
[]
TAGS #arxiv-2305.13915 #arxiv-2302.07452 #region-us
DAPR: Document-Aware Passage Retrieval ====================================== This datasets repo contains the queries, passages/documents and judgements for the data used in the DAPR paper. Overview -------- For the DAPR benchmark, it contains 5 datasets: And additionally, NQ-hard, the hard subset of queries from Natural Questions is also included (516 in total). These queries are hard because understanding the document context (e.g. coreference, main topic, multi-hop reasoning, and acronym) is necessary for retrieving the relevant passages. > > Notes: for MS MARCO, its documents do not provide the gold paragraph segmentation and we only segment the document by keeping the judged passages (from the MS MARCO Passage Ranking task) standing out while leaving the rest parts surrounding these passages. These passages are marked by 'is\_candidate==true'. > > > > > For Natural Questions, the training split is not provided because the duplidate timestamps cannot be compatible with the queries/qrels/corpus format. Please refer to URL for the training split. > > > Load the dataset ---------------- ### Loading the passages One can load the passages like this: Or strem the dataset without downloading it beforehand: ### Loading the qrels The qrels split contains the query relevance annotation, i.e., it contains the relevance score for (query, passage) pairs. We present the NQ-hard dataset in an extended format of the normal qrels with additional columns: Retrieval and Evaluation ------------------------ The following shows an example, how the dataset can be used to build a semantic search application. > > This example is based on clddp ('pip install -U cldpp'). One can further explore this example for convenient multi-GPU exact search. > > > Note ---- This dataset was created with 'datasets==2.15.0'. Make sure to use this or a newer version of the datasets library. If you use the code/data, feel free to cite our publication DAPR: A Benchmark on Document-Aware Passage Retrieval:
[ "### Loading the passages\n\n\nOne can load the passages like this:\n\n\nOr strem the dataset without downloading it beforehand:", "### Loading the qrels\n\n\nThe qrels split contains the query relevance annotation, i.e., it contains the relevance score for (query, passage) pairs.\n\n\nWe present the NQ-hard dataset in an extended format of the normal qrels with additional columns:\n\n\nRetrieval and Evaluation\n------------------------\n\n\nThe following shows an example, how the dataset can be used to build a semantic search application.\n\n\n\n> \n> This example is based on clddp ('pip install -U cldpp'). One can further explore this example for convenient multi-GPU exact search.\n> \n> \n> \n\n\nNote\n----\n\n\nThis dataset was created with 'datasets==2.15.0'. Make sure to use this or a newer version of the datasets library.\n\n\nIf you use the code/data, feel free to cite our publication DAPR: A Benchmark on Document-Aware Passage Retrieval:" ]
[ "TAGS\n#arxiv-2305.13915 #arxiv-2302.07452 #region-us \n", "### Loading the passages\n\n\nOne can load the passages like this:\n\n\nOr strem the dataset without downloading it beforehand:", "### Loading the qrels\n\n\nThe qrels split contains the query relevance annotation, i.e., it contains the relevance score for (query, passage) pairs.\n\n\nWe present the NQ-hard dataset in an extended format of the normal qrels with additional columns:\n\n\nRetrieval and Evaluation\n------------------------\n\n\nThe following shows an example, how the dataset can be used to build a semantic search application.\n\n\n\n> \n> This example is based on clddp ('pip install -U cldpp'). One can further explore this example for convenient multi-GPU exact search.\n> \n> \n> \n\n\nNote\n----\n\n\nThis dataset was created with 'datasets==2.15.0'. Make sure to use this or a newer version of the datasets library.\n\n\nIf you use the code/data, feel free to cite our publication DAPR: A Benchmark on Document-Aware Passage Retrieval:" ]
[ 23, 28, 207 ]
[ "passage: TAGS\n#arxiv-2305.13915 #arxiv-2302.07452 #region-us \n### Loading the passages\n\n\nOne can load the passages like this:\n\n\nOr strem the dataset without downloading it beforehand:### Loading the qrels\n\n\nThe qrels split contains the query relevance annotation, i.e., it contains the relevance score for (query, passage) pairs.\n\n\nWe present the NQ-hard dataset in an extended format of the normal qrels with additional columns:\n\n\nRetrieval and Evaluation\n------------------------\n\n\nThe following shows an example, how the dataset can be used to build a semantic search application.\n\n\n\n> \n> This example is based on clddp ('pip install -U cldpp'). One can further explore this example for convenient multi-GPU exact search.\n> \n> \n> \n\n\nNote\n----\n\n\nThis dataset was created with 'datasets==2.15.0'. Make sure to use this or a newer version of the datasets library.\n\n\nIf you use the code/data, feel free to cite our publication DAPR: A Benchmark on Document-Aware Passage Retrieval:" ]
[ -0.11108065396547318, 0.2294212132692337, -0.0029850206337869167, 0.058111436665058136, 0.07255145907402039, 0.026205923408269882, -0.042129844427108765, 0.11518177390098572, 0.04827568307518959, 0.0823163241147995, 0.10589953511953354, 0.03527902811765671, 0.03614192083477974, 0.1707959771156311, -0.013976720161736012, -0.09800989180803299, 0.00796017237007618, -0.005856916308403015, 0.02987646870315075, 0.10999248921871185, 0.06212365999817848, -0.08894969522953033, 0.12128103524446487, -0.038983315229415894, -0.07243708521127701, 0.04706626385450363, -0.057507943361997604, -0.026882071048021317, 0.0963568314909935, 0.016519863158464432, 0.04608534276485443, 0.032165613025426865, 0.0651499480009079, -0.12774404883384705, 0.04042372852563858, 0.056938234716653824, -0.013560264371335506, 0.05902187153697014, 0.04793044179677963, -0.04647194221615791, -0.0003937767178285867, 0.026089493185281754, -0.015824133530259132, 0.031905148178339005, -0.0876239463686943, -0.15127070248126984, -0.11779804527759552, -0.06493102014064789, 0.03092171996831894, 0.05524734407663345, -0.00179460016079247, 0.07901611179113388, -0.061110999435186386, 0.00984431803226471, 0.1651642918586731, -0.26219436526298523, -0.016675572842359543, 0.12485315650701523, 0.041451346129179, 0.1134725883603096, -0.04651760309934616, 0.004318369552493095, 0.015735091641545296, 0.045094601809978485, 0.07228149473667145, -0.06783410906791687, -0.20372873544692993, 0.04993310943245888, -0.09895466268062592, -0.03381437808275223, 0.3185287415981293, -0.05900731682777405, -0.05169370770454407, 0.000775947526562959, -0.08393330872058868, 0.03841259330511093, 0.002508441684767604, 0.0032682884484529495, -0.0061723412945866585, 0.03121267817914486, 0.01678893156349659, -0.11356610804796219, -0.06428161263465881, -0.1072392538189888, -0.10649250447750092, -0.02097506634891033, -0.0010502386139705777, 0.05263149365782738, 0.0335359200835228, 0.0740455612540245, -0.15647824108600616, -0.014017077162861824, 
-0.08522302657365799, -0.14017556607723236, -0.045898690819740295, 0.05249859020113945, -0.07475852221250534, -0.09970748424530029, 0.08073797821998596, 0.1581876128911972, 0.07785496860742569, 0.04902291297912598, -0.06810928881168365, 0.03605683892965317, 0.025452714413404465, 0.06640882790088654, -0.029516052454710007, -0.014597009867429733, 0.08085976541042328, 0.0031210698653012514, 0.021755803376436234, -0.03150571137666702, -0.06631637364625931, -0.08681467175483704, -0.04211743548512459, 0.08314602077007294, 0.010043355636298656, 0.031497012823820114, -0.01903621479868889, -0.04488946869969368, 0.06838975846767426, -0.11486208438873291, -0.05651916190981865, 0.07047225534915924, -0.051373690366744995, -0.1254325658082962, 0.0487513467669487, -0.03716268762946129, -0.05866049975156784, -0.029740147292613983, -0.08094768971204758, -0.02878323197364807, -0.04954834654927254, -0.07392069697380066, 0.021828386932611465, -0.1072854995727539, -0.045407772064208984, -0.11869040876626968, -0.22510923445224762, -0.03668336942791939, 0.022724520415067673, -0.00336387543939054, 0.023475758731365204, 0.017357314005494118, 0.046896036714315414, -0.042198412120342255, -0.048424553126096725, -0.027024514973163605, -0.054720982909202576, 0.0914633721113205, 0.0005143264424987137, 0.06018954887986183, -0.10106434673070908, 0.056192606687545776, -0.10693996399641037, 0.055855121463537216, -0.0826154425740242, 0.12394601851701736, -0.09921472519636154, -0.06552029401063919, -0.10160094499588013, -0.01247172150760889, -0.07289522141218185, -0.008390579372644424, 0.034635886549949646, 0.12458948791027069, -0.2667655944824219, 0.03030882216989994, 0.1166691705584526, -0.07865725457668304, -0.10667947679758072, 0.06523075699806213, -0.04362821951508522, -0.034385669976472855, 0.03759095072746277, 0.23197868466377258, 0.1669732928276062, -0.08848736435174942, -0.09998102486133575, 0.04948965460062027, -0.03834385797381401, -0.0740986168384552, 0.10673437267541885, 
-0.009360096417367458, -0.006604020018130541, 0.04385320097208023, -0.107770636677742, -0.04255576804280281, 0.014256271533668041, -0.06688815355300903, -0.05110173672437668, -0.05254590883851051, 0.05043419077992439, -0.021564360707998276, -0.0408063605427742, 0.036325499415397644, 0.013988726772367954, -0.08016013354063034, 0.08182209730148315, -0.03726579248905182, -0.013512910343706608, -0.0674150213599205, 0.12521323561668396, -0.10240867733955383, 0.04785734787583351, -0.22323544323444366, -0.10964816063642502, 0.040300656110048294, -0.07519367337226868, 0.051700033247470856, -0.02980778180062771, 0.008505991660058498, 0.005403502378612757, 0.007845556363463402, 0.05779402330517769, -0.0526052787899971, -0.07122644037008286, -0.015106704086065292, 0.03778107464313507, -0.02282281033694744, -0.05359366908669472, 0.0075210281647741795, -0.11856156587600708, 0.0005734037840738893, 0.034196775406599045, 0.031163087114691734, 0.013256051577627659, -0.04074958339333534, 0.07259297370910645, 0.005008843261748552, -0.003615435678511858, -0.02038605697453022, 0.04281095415353775, 0.008112284354865551, -0.03375038877129555, 0.07562145590782166, -0.11335916072130203, 0.019547848030924797, 0.056659355759620667, 0.03527478501200676, 0.017370037734508514, -0.11757712066173553, -0.05489162728190422, 0.015234358608722687, -0.10358667373657227, 0.0010240698466077447, 0.11613069474697113, 0.0494069866836071, 0.07657025754451752, -0.12030840665102005, 0.005900838412344456, -0.00315883313305676, 0.03534308075904846, 0.008129865862429142, 0.07304005324840546, 0.12298478931188583, -0.06443187594413757, 0.06740537285804749, 0.12003807723522186, -0.040283203125, 0.05184958502650261, -0.013891847804188728, -0.08086968213319778, 0.004459017422050238, 0.04697481170296669, -0.005266046151518822, 0.11445195227861404, 0.09782060235738754, 0.027648966759443283, 0.07291300594806671, -0.01314285583794117, 0.04392588511109352, -0.09151159971952438, -0.006473394576460123, 
-0.028017651289701462, -0.059422221034765244, -0.07339741289615631, 0.019488077610731125, 0.019567808136343956, 0.07137302309274673, 0.033577632158994675, 0.1206073984503746, -0.015744131058454514, -0.026731086894869804, -0.062489960342645645, 0.20809215307235718, -0.1337968111038208, -0.25377216935157776, -0.14577637612819672, 0.04596184939146042, -0.10345150530338287, 0.01545101311057806, 0.031837549060583115, -0.04338185861706734, -0.03279079496860504, -0.059446629136800766, 0.024624332785606384, -0.007006029598414898, -0.08876162767410278, -0.1014171615242958, 0.026652518659830093, 0.01191648282110691, -0.11683789640665054, 0.061348386108875275, -0.012343328446149826, -0.022442147135734558, 0.011577545665204525, -0.006763050332665443, 0.13371498882770538, -0.001935651060193777, -0.019073883071541786, -0.00481615262106061, 0.00698488624766469, 0.17931953072547913, -0.05100663751363754, -0.03187951445579529, 0.1223202720284462, -0.06887251883745193, 0.031543876975774765, 0.06382166594266891, 0.009204719215631485, -0.09728126972913742, 0.012989668175578117, 0.062120385468006134, -0.07442876696586609, -0.24418973922729492, -0.09841839969158173, -0.07263322174549103, 0.08105114102363586, 0.052365951240062714, 0.049212608486413956, -0.14883923530578613, 0.07473186403512955, -0.0256208386272192, 0.0049156323075294495, -0.061454106122255325, 0.027913829311728477, 0.2725861966609955, -0.02872876077890396, 0.032067522406578064, -0.06506416946649551, -0.008148141205310822, 0.11305692046880722, 0.1796070635318756, 0.21882151067256927, -0.09375910460948944, 0.09914566576480865, 0.05008923262357712, 0.12251431494951248, 0.054566413164138794, 0.07281310111284256, -0.009424741379916668, 0.03282421454787254, -0.035205479711294174, -0.05921866372227669, -0.11252696067094803, 0.020778248086571693, 0.000024155981009243988, -0.09297779202461243, 0.04113109037280083, -0.01961267925798893, 0.038002002984285355, 0.10041102766990662, -0.013503102585673332, -0.18617048859596252, 
-0.03499162197113037, -0.018797684460878372, 0.05483931303024292, -0.09437296539545059, 0.0871451273560524, 0.09869597852230072, -0.0654473677277565, 0.012703320011496544, -0.057221636176109314, 0.11420449614524841, -0.10984031111001968, -0.020987018942832947, -0.06898908317089081, 0.05510127916932106, -0.01018584705889225, 0.11917217820882797, -0.18903720378875732, 0.08861074596643448, 0.05721794068813324, -0.003542656311765313, -0.015881432220339775, -0.030460886657238007, 0.04559919238090515, 0.017771592363715172, 0.1483684480190277, 0.005667204037308693, -0.0662558302283287, -0.015636436641216278, -0.1386680006980896, 0.10586061328649521, 0.04927492514252663, -0.03540929779410362, 0.04095414653420448, -0.0006558306631632149, 0.02563466876745224, -0.07180915027856827, 0.060705482959747314, -0.07454057037830353, -0.15454035997390747, 0.029758792370557785, 0.002526982920244336, -0.0011199098080396652, 0.008697791956365108, 0.0242425799369812, 0.09603556990623474, 0.015667472034692764, -0.16249607503414154, -0.06197022646665573, -0.10491008311510086, 0.016024019569158554, 0.17780376970767975, -0.04058605059981346, 0.02889883518218994, -0.021401744335889816, 0.10434436798095703, -0.019440853968262672, -0.16613131761550903, -0.001749957911670208, -0.06627947837114334, -0.05966797098517418, -0.03089115582406521, 0.033309370279312134, -0.0025943543296307325, 0.02949231117963791, 0.01420380175113678, 0.060695234686136246, -0.06548436731100082, -0.09434457868337631, 0.024685293436050415, 0.06256671994924545, 0.082479327917099, 0.10600370168685913, -0.12080082297325134, 0.0015024775639176369, -0.031164173036813736, -0.0004180895921308547, 0.19338904321193695, 0.08736946433782578, -0.08851165324449539, 0.11485643684864044, 0.16323330998420715, -0.07854355871677399, -0.3029932975769043, -0.04860954359173775, 0.027025489136576653, 0.016728710383176804, 0.010493779554963112, -0.2650775909423828, 0.06148144602775574, 0.1618078351020813, -0.020829226821660995, 
0.0751391351222992, -0.21545493602752686, -0.04114075005054474, 0.09199792891740799, -0.028543787077069283, 0.029483171179890633, -0.10823650658130646, -0.08466370403766632, -0.009428855031728745, -0.06798865646123886, 0.20216718316078186, -0.07141607999801636, 0.07644680142402649, -0.062093574553728104, 0.049478571861982346, 0.0252104289829731, -0.008322459645569324, 0.10281810909509659, -0.01679375395178795, 0.018550042062997818, -0.029671143740415573, 0.04308588430285454, -0.018971120938658714, -0.03848297894001007, 0.12990877032279968, -0.023432224988937378, 0.11341816931962967, -0.15652433037757874, -0.017103802412748337, -0.0323244072496891, 0.06473013758659363, 0.019674276933073997, -0.03485206514596939, -0.09894430637359619, -0.015635501593351364, -0.008395830169320107, -0.015121069736778736, 0.06769678741693497, -0.0167533066123724, 0.09095783531665802, 0.14712630212306976, 0.08999228477478027, 0.07121933251619339, -0.12760716676712036, 0.002628486370667815, -0.004224210977554321, 0.09312471747398376, -0.18749985098838806, 0.02386609837412834, 0.15360207855701447, 0.028415776789188385, 0.06146430969238281, 0.04348999261856079, -0.09167203307151794, 0.07727561146020889, 0.0476786233484745, -0.1713167428970337, -0.07705951482057571, -0.0006671183509752154, 0.009511040523648262, -0.10317020118236542, -0.016377907246351242, 0.11167990416288376, -0.04176797717809677, -0.053828924894332886, 0.031006604433059692, 0.04866483062505722, 0.05107526481151581, 0.12823925912380219, 0.10595457255840302, 0.0036731490399688482, -0.10363825410604477, 0.15242236852645874, 0.07095268368721008, -0.14197714626789093, 0.03095407597720623, 0.08349648118019104, -0.09855744242668152, -0.07749199867248535, -0.04406685009598732, 0.09510959684848785, -0.09938232600688934, -0.04434138908982277, -0.011488135904073715, -0.021048743277788162, 0.04615366831421852, 0.15685062110424042, 0.020939448848366737, 0.050122808665037155, 0.007827483117580414, -0.03733358532190323, 
-0.021223878487944603, 0.11368449032306671, -0.042618900537490845, 0.05020841583609581, -0.09409923851490021, -0.08329129964113235, -0.022928228601813316, 0.10128828883171082, -0.0321098156273365, -0.01973607949912548, -0.06217401102185249, -0.010227609425783157, -0.17424829304218292, 0.08092266321182251, -0.031765785068273544, 0.030514148995280266, -0.04159446060657501, 0.03821801766753197, -0.07712824642658234, 0.012006737291812897, -0.08277498930692673, -0.02379973977804184, -0.08769907057285309, 0.06773478537797928, -0.1507529765367508, 0.00902810599654913, 0.05956750735640526, -0.005763866472989321, 0.11169964075088501, 0.07916013896465302, -0.03805803880095482, 0.021169766783714294, 0.019397560507059097, -0.08556775748729706, -0.008379205130040646, 0.11609692126512527, 0.007689039222896099, 0.03279649093747139, 0.03198399022221565, 0.016054734587669373, -0.06091025099158287, -0.00013376155402511358, 0.18916034698486328, -0.11624086648225784, -0.022770337760448456, -0.006558253429830074, 0.02267446368932724, -0.0422828271985054, 0.008597349748015404, 0.09809137880802155, 0.17414194345474243, 0.047263987362384796, -0.046051736921072006, 0.04948695749044418, -0.09242026507854462, -0.031208528205752373, -0.01583998277783394, -0.07217919081449509, -0.06378365308046341, -0.02615327760577202, 0.05359455198049545, -0.0022039220202714205, 0.2582244575023651, -0.0064905802719295025, 0.03525382652878761, -0.003964225295931101, 0.0797077938914299, -0.053099919110536575, -0.02194570191204548, 0.08106257021427155, 0.06598866730928421, -0.045898113399744034, 0.03815757483243942, 0.06640613824129105, -0.007790247444063425, 0.04564946889877319, 0.05925930663943291, 0.10490643978118896, 0.14162562787532806, -0.007359952200204134, 0.05762789770960808, -0.09833896160125732, 0.003397942055016756, 0.05527208372950554, 0.009212173521518707, 0.07958969473838806, -0.02421189472079277, 0.08420141786336899, 0.10222688317298889, -0.05642537400126457, 0.08753602206707001, 
-0.04331228882074356, -0.04948125779628754, -0.11409302800893784, 0.0031234384514391422, -0.07318289577960968, -0.11682529747486115, -0.0290827639400959, -0.156645730137825, 0.01304667443037033, 0.1444680541753769, -0.014440346509218216, -0.00776876974850893, 0.13795942068099976, -0.030326535925269127, -0.1552426815032959, 0.004430603701621294, -0.0073194666765630245, 0.009483247995376587, 0.06094764545559883, 0.045271966606378555, 0.13614793121814728, -0.02299319952726364, 0.07928279042243958, 0.06009051203727722, 0.13787245750427246, 0.03882915899157524, -0.1372130811214447, 0.001523891114629805, -0.04547009989619255, 0.03812483325600624, -0.019146068021655083, 0.0933040902018547, 0.03304709866642952, -0.05000026151537895, 0.030196521431207657, 0.2545677423477173, 0.008796137757599354, 0.07331409305334091, -0.14799848198890686, 0.14979010820388794, -0.00709402235224843, 0.02952628582715988, -0.0016508083790540695, -0.06017312407493591, 0.00498053478077054, 0.18087470531463623, 0.11061737686395645, -0.09166289120912552, -0.017184864729642868, 0.06503153592348099, 0.023914193734526634, 0.009333401918411255, 0.11860794574022293, 0.06861981004476547, 0.2881520688533783, -0.05320575460791588, -0.031884387135505676, -0.06171681731939316, 0.017016049474477768, -0.0746898427605629, 0.08718410134315491, -0.015723131597042084, -0.022754400968551636, -0.05990569666028023, 0.09047512710094452, -0.09630230069160461, -0.0724504366517067, -0.09748998284339905, -0.13170218467712402, -0.12373596429824829, -0.0913395881652832, -0.04334038123488426, -0.0077493758872151375, -0.017709290608763695, -0.03241061046719551, 0.013259700499475002, 0.05706729739904404, -0.020314684137701988, -0.12185756117105484, -0.0968390703201294, 0.07573942840099335, 0.02484261617064476, 0.17558860778808594, -0.02628401666879654, 0.019395295530557632, 0.045753370970487595, -0.04349840059876442, -0.12377040833234787, 0.060430530458688736, 0.057832956314086914, -0.012528070248663425, 0.007225946057587862, 
0.056091003119945526, -0.03340965136885643, 0.10489220172166824, 0.05513694882392883, 0.025516368448734283, 0.010188826359808445, 0.1036224514245987, 0.009728364646434784, -0.0630222037434578, 0.035016369074583054, -0.119679294526577, 0.12002341449260712, 0.10500733554363251, -0.014696069061756134, 0.02674134261906147, -0.06241357699036598, 0.03731647506356239, 0.0024244538508355618, 0.061527419835329056, 0.04667360335588455, -0.06570813804864883, -0.027886247262358665, 0.02220512554049492, -0.01878896914422512, -0.20919035375118256, 0.025121163576841354, 0.04916470870375633, 0.018058426678180695, 0.03399142250418663, 0.06988199055194855, -0.0006456657429225743, 0.0026539883110672235, -0.04281186684966087, -0.08214344084262848, 0.020459910854697227, 0.005025082733482122, -0.0852971225976944, -0.07389648258686066 ]
082ace538e9f35ac2bbe71df9b7bfc5ecc229251
# Dataset Info <!-- Provide a quick summary of the dataset. --> Dataset obtained via https://www.kaggle.com/datasets/emrahaydemr/turkish-mail-dataset-normalspam
anilguven/turkish_spam_email
[ "task_categories:text-classification", "size_categories:1K<n<10K", "language:tr", "license:unknown", "turkish", "spam", "email", "text-classification", "region:us" ]
2024-01-25T15:43:35+00:00
{"language": ["tr"], "license": "unknown", "size_categories": ["1K<n<10K"], "task_categories": ["text-classification"], "tags": ["turkish", "spam", "email", "text-classification"]}
2024-01-25T15:52:03+00:00
[]
[ "tr" ]
TAGS #task_categories-text-classification #size_categories-1K<n<10K #language-Turkish #license-unknown #turkish #spam #email #text-classification #region-us
# Dataset Info Dataset obtained via URL
[ "# Dataset Info\n\n\n\nDataset obtained via URL" ]
[ "TAGS\n#task_categories-text-classification #size_categories-1K<n<10K #language-Turkish #license-unknown #turkish #spam #email #text-classification #region-us \n", "# Dataset Info\n\n\n\nDataset obtained via URL" ]
[ 55, 10 ]
[ "passage: TAGS\n#task_categories-text-classification #size_categories-1K<n<10K #language-Turkish #license-unknown #turkish #spam #email #text-classification #region-us \n# Dataset Info\n\n\n\nDataset obtained via URL" ]
[ 0.013918346725404263, 0.05278420448303223, -0.002583883237093687, 0.051328834146261215, 0.1256621778011322, 0.09177473187446594, 0.2642017900943756, 0.07009687274694443, 0.14336127042770386, -0.02590128406882286, 0.07389658689498901, -0.017726853489875793, 0.062262162566185, 0.13673420250415802, -0.05058152973651886, -0.23262150585651398, 0.020748944953083992, -0.1197599321603775, 0.019252892583608627, 0.0896172896027565, 0.09812291711568832, -0.018162501975893974, 0.05403970554471016, -0.10361547023057938, -0.06454020738601685, 0.05298321694135666, -0.0375322550535202, -0.12159601598978043, 0.03943774849176407, -0.040967244654893875, 0.07654422521591187, -0.03986624255776405, -0.010835383087396622, -0.13588157296180725, 0.004716218449175358, -0.059395499527454376, -0.06774421036243439, 0.002405994338914752, 0.0836355909705162, -0.1221044510602951, 0.13977578282356262, -0.238653764128685, -0.05213761329650879, 0.017573706805706024, -0.07171609252691269, -0.01373946014791727, -0.13168421387672424, 0.17546068131923676, 0.05831196531653404, 0.08076861500740051, -0.006108421366661787, 0.06906193494796753, -0.11373788118362427, 0.0576491579413414, 0.08226996660232544, -0.25466546416282654, 0.0008586839539930224, 0.033996857702732086, -0.08648180216550827, 0.1827124059200287, 0.01331010740250349, 0.08042296767234802, 0.022332826629281044, -0.047988686710596085, -0.16731992363929749, -0.05524173006415367, -0.20715340971946716, -0.03905486315488815, -0.012394064106047153, -0.047034114599227905, 0.32251450419425964, 0.0810915008187294, 0.09165596216917038, -0.12295989692211151, 0.024985935539007187, -0.06405208259820938, -0.03161904960870743, 0.13172820210456848, -0.07710760086774826, 0.002383741782978177, 0.09665529429912567, 0.02870638109743595, -0.08814111351966858, -0.056549157947301865, -0.19045110046863556, 0.1697200983762741, -0.007165777496993542, 0.05446209758520126, -0.06416589766740799, -0.006064625922590494, -0.017736876383423805, -0.08931878209114075, 
0.08088928461074829, -0.016773484647274017, -0.0633930191397667, 0.00830618105828762, 0.03982727974653244, -0.0724496990442276, 0.19109594821929932, -0.037017352879047394, 0.06319669634103775, 0.03934157267212868, -0.035533357411623, 0.10945475846529007, 0.03728853538632393, -0.023462293669581413, -0.013526306487619877, -0.11713000386953354, -0.030110221356153488, -0.07320038974285126, 0.05973853915929794, -0.02462558075785637, -0.16704130172729492, -0.012793511152267456, -0.03193221613764763, 0.1559569537639618, 0.02385067194700241, 0.07552196085453033, 0.01785540021955967, 0.007665504701435566, 0.05758123844861984, -0.09118718653917313, -0.006318904459476471, 0.021383043378591537, -0.10838371515274048, -0.04529032111167908, -0.12358216196298599, -0.01751525327563286, -0.029460037127137184, 0.08829933404922485, -0.02756400592625141, 0.07356724143028259, 0.050535351037979126, -0.11119628697633743, 0.11024730652570724, -0.16816891729831696, -0.02502676285803318, -0.11922860145568848, -0.20990221202373505, 0.018241379410028458, -0.03499195724725723, -0.05992046371102333, 0.08577325940132141, -0.023082777857780457, -0.026705458760261536, 0.049401260912418365, -0.03389474004507065, 0.03677947074174881, -0.13260069489479065, 0.09534495323896408, -0.1433892697095871, 0.08841683715581894, -0.08257313072681427, -0.012392365373671055, -0.09969145804643631, -0.017649678513407707, -0.014050119556486607, 0.13853007555007935, -0.17402535676956177, 0.2086680382490158, -0.1302475482225418, 0.017647163942456245, -0.022506043314933777, -0.023789457976818085, -0.081495001912117, 0.1628333032131195, -0.19011598825454712, -0.0444178432226181, 0.05218236520886421, -0.054724860936403275, -0.12959124147891998, 0.020275313407182693, 0.03240421041846275, 0.11903777718544006, 0.13801231980323792, 0.3915906846523285, 0.01770360767841339, 0.06128203496336937, -0.13572046160697937, 0.1446988582611084, -0.05690497159957886, -0.07413218915462494, 0.09298288822174072, -0.08894964307546616, 
0.028683513402938843, 0.03420953080058098, 0.09710090607404709, 0.1027480736374855, 0.007605470716953278, -0.08252320438623428, 0.04780681058764458, -0.060821082442998886, 0.11406951397657394, -0.02339242771267891, 0.14861416816711426, -0.0855870246887207, -0.009773649275302887, -0.008979515172541142, 0.033682681620121, 0.06224554777145386, -0.06473922729492188, -0.001276766532100737, 0.0237704049795866, -0.005623441189527512, 0.027537284418940544, -0.1351342499256134, -0.07248912006616592, -0.03265908360481262, 0.11289620399475098, 0.10024404525756836, -0.0603327676653862, 0.03365819528698921, -0.09521452337503433, -0.013580894097685814, 0.0859488844871521, 0.07575567066669464, 0.07228080928325653, -0.102956622838974, -0.13106168806552887, 0.08174938708543777, 0.025555353611707687, 0.11320957541465759, -0.004370789509266615, 0.004181153606623411, 0.1857762485742569, 0.08308781683444977, 0.019856473430991173, 0.07001205533742905, 0.01306935865432024, 0.026810895651578903, 0.019635815173387527, -0.07519403100013733, 0.026925507932901382, -0.08493847399950027, -0.11138256639242172, 0.030169470235705376, -0.047899242490530014, 0.0778830349445343, 0.17446230351924896, 0.08295896649360657, -0.04380527511239052, -0.0018253783928230405, 0.0056557259522378445, -0.0004646115121431649, 0.008645252324640751, -0.022901738062500954, 0.00025858188746497035, 0.026831675320863724, 0.09131226688623428, -0.02138746902346611, -0.05733954533934593, -0.005595517344772816, -0.0030067740008234978, -0.03414369001984596, 0.1424519568681717, 0.055273398756980896, -0.3264590799808502, 0.1887955665588379, 0.10582674294710159, 0.12778708338737488, 0.2432953119277954, -0.04926619306206703, 0.0181374941021204, -0.0057013207115232944, -0.0016223653219640255, -0.025464745238423347, 0.07620887458324432, -0.1615002155303955, -0.03670831769704819, 0.07952656596899033, 0.07912112772464752, 0.05961409956216812, -0.038577400147914886, -0.07758353650569916, -0.06712910532951355, -0.05268900468945503, 
-0.134782612323761, 0.04571947827935219, 0.022485340014100075, 0.14516565203666687, -0.005353383254259825, -0.008361284621059895, 0.06610338389873505, -0.04040836915373802, -0.08355454355478287, 0.08786028623580933, -0.1540786772966385, -0.266615629196167, -0.09198394417762756, 0.002778134308755398, -0.092100590467453, -0.027186235412955284, 0.028464866802096367, -0.1711236983537674, 0.01628100872039795, -0.034432314336299896, 0.03607313707470894, -0.05388077720999718, -0.004386982414871454, -0.07288797944784164, 0.09195505082607269, -0.0942193865776062, -0.0818900316953659, -0.027510231360793114, -0.0433221235871315, 0.06618894636631012, 0.047487691044807434, -0.1301177442073822, 0.07394459843635559, 0.14141727983951569, -0.04966311901807785, 0.06243152171373367, -0.09797490388154984, 0.10664571076631546, -0.14991170167922974, 0.06943544745445251, -0.009217305108904839, 0.04075545817613602, -0.008793899789452553, 0.21313118934631348, 0.05412731319665909, -0.039901088923215866, 0.002611590549349785, 0.03258318081498146, -0.043740164488554, -0.2928388714790344, -0.18359965085983276, -0.07022162526845932, 0.05841844528913498, 0.03774551302194595, 0.08479957282543182, 0.05510301515460014, 0.015790723264217377, -0.011860013008117676, 0.06725677102804184, 0.0524294450879097, 0.012912682257592678, 0.1973128616809845, 0.02969004027545452, 0.019515594467520714, -0.18313287198543549, -0.03533488139510155, 0.09858027845621109, 0.017012428492307663, 0.2516968846321106, 0.13661335408687592, 0.20146478712558746, 0.010334058664739132, -0.019296376034617424, 0.01928580179810524, 0.026086315512657166, 0.1164180263876915, 0.007169097196310759, -0.008517136797308922, -0.062008388340473175, 0.05582796037197113, 0.006216443609446287, -0.056909359991550446, -0.10634807497262955, -0.012450040318071842, -0.07139913737773895, 0.19648967683315277, -0.018028825521469116, 0.0464961864054203, -0.08564553409814835, 0.005642487667500973, 0.03796987980604172, -0.035764098167419434, 
-0.03097771480679512, 0.037947673350572586, 0.06215112283825874, -0.0809657871723175, 0.16146601736545563, -0.003924832679331303, 0.154158353805542, -0.03166641294956207, -0.0012511499226093292, -0.08365626633167267, -0.2035837173461914, -0.03183693066239357, 0.19029352068901062, -0.06318585574626923, 0.3175741732120514, 0.09998392313718796, 0.024005649611353874, -0.13480496406555176, -0.09595775604248047, 0.04819810017943382, 0.1305679976940155, 0.0981450229883194, 0.0505262091755867, -0.023875642567873, -0.11613587290048599, -0.1815359741449356, -0.024574195966124535, 0.08521541953086853, -0.04288323596119881, -0.03122490644454956, 0.01626681350171566, 0.022041285410523415, -0.037493567913770676, -0.1322324275970459, -0.1255718469619751, -0.16453611850738525, 0.02720888890326023, 0.13737890124320984, -0.054184507578611374, 0.022266479209065437, -0.05088146775960922, -0.05976055562496185, 0.13993287086486816, -0.16870084404945374, -0.1267508715391159, -0.08830935508012772, 0.005121954716742039, 0.07151277363300323, -0.038651999086141586, -0.063367560505867, -0.005309698171913624, -0.11217258870601654, -0.05723148584365845, -0.10970006883144379, 0.07604211568832397, 0.009478808380663395, -0.03558659180998802, -0.06084882467985153, 0.13844594359397888, -0.011526495218276978, 0.08168905228376389, -0.022215088829398155, 0.018815817311406136, 0.03226485475897789, -0.11871793866157532, -0.010436199605464935, 0.03804057464003563, 0.022697672247886658, 0.2129424810409546, 0.04267140105366707, -0.17354348301887512, -0.06488542258739471, -0.05482131242752075, 0.24925144016742706, 0.16205479204654694, -0.10258527100086212, 0.1503346562385559, 0.02254060097038746, -0.055489882826805115, -0.19668589532375336, -0.018263300880789757, -0.057232290506362915, -0.020562995225191116, -0.03972436115145683, -0.08200102299451828, 0.04452063888311386, 0.1044718325138092, -0.05453820899128914, 0.11331533640623093, -0.37221571803092957, -0.09284867346286774, 0.03838752582669258, 
-0.12989431619644165, 0.13516853749752045, -0.13386090099811554, -0.11002907156944275, -0.12661921977996826, 0.00907176174223423, 0.1310349553823471, -0.13114601373672485, 0.06030651554465294, -0.010388996452093124, 0.09991810470819473, 0.004819828551262617, 0.004869219847023487, 0.20689243078231812, 0.11520368605852127, 0.083783358335495, -0.10033766180276871, -0.2110649049282074, 0.23617622256278992, 0.03170052543282509, 0.07821276783943176, -0.13860169053077698, -0.0031882242765277624, -0.1787789762020111, -0.028839804232120514, -0.013868357054889202, 0.10060195624828339, 0.02194160968065262, -0.009755637496709824, -0.12503451108932495, -0.012375401332974434, -0.0606357678771019, 0.024751638993620872, 0.22143690288066864, 0.023622415959835052, -0.027129054069519043, -0.02111334353685379, 0.038297031074762344, -0.0283927321434021, 0.047466401010751724, -0.04637511819601059, -0.04891986772418022, 0.04733284190297127, -0.2176980972290039, -0.09409287571907043, 0.16286124289035797, 0.0004484902892727405, 0.07280352711677551, 0.004565923474729061, -0.045142851769924164, 0.03748932480812073, 0.1705983579158783, -0.029699163511395454, -0.13546741008758545, -0.023055195808410645, -0.07451782375574112, 0.02068278193473816, -0.028087379410862923, 0.0437094047665596, -0.007652000989764929, 0.03627060726284981, 0.006010924465954304, -0.005659663118422031, -0.013933571986854076, 0.12435722351074219, 0.057989105582237244, 0.003875704249367118, -0.14224758744239807, 0.17972584068775177, 0.18848150968551636, -0.023550106212496758, 0.02652309648692608, 0.12885645031929016, -0.15429458022117615, -0.06963760405778885, 0.020162731409072876, 0.1309221237897873, 0.11669600009918213, -0.08574744313955307, -0.04185373708605766, -0.058777179569005966, -0.031031697988510132, 0.09433172643184662, 0.01644132472574711, 0.05625814571976662, -0.01934218965470791, -0.10280811041593552, 0.07333207875490189, 0.07864423841238022, -0.02147424966096878, -0.04964645951986313, -0.1629551351070404, 
0.009728294797241688, 0.032144270837306976, 0.03840010240674019, -0.02278674766421318, -0.01663057878613472, -0.0655013918876648, 0.009308701381087303, -0.09061254560947418, 0.08577010035514832, -0.08823221176862717, 0.02730582468211651, -0.010776925832033157, -0.02917986735701561, -0.06593696773052216, -0.1004849225282669, -0.09882408380508423, -0.011588134802877903, -0.009475947357714176, 0.15496405959129333, -0.135396346449852, -0.05266989395022392, 0.012013344094157219, 0.05396376550197601, 0.1134377047419548, 0.1471252739429474, -0.008485261350870132, 0.11326557397842407, -0.12651264667510986, 0.0010742208687588573, 0.09954210370779037, 0.04538703337311745, 0.06620576232671738, 0.09907102584838867, -0.017932206392288208, 0.034452106803655624, 0.051716141402721405, 0.1611444354057312, -0.05816411226987839, -0.10907444357872009, -0.014336968772113323, -0.15089434385299683, -0.048117123544216156, -0.021563759073615074, 0.05615367740392685, 0.1513223797082901, -0.01439180038869381, 0.12283250689506531, -0.033113718032836914, 0.030853375792503357, -0.06567156314849854, 0.023487288504838943, -0.09422843903303146, -0.1275697499513626, -0.07179570943117142, -0.030862493440508842, 0.04035235941410065, -0.04813333973288536, 0.3067140579223633, 0.21084432303905487, -0.05897948518395424, 0.0330415815114975, 0.14835597574710846, -0.020245740190148354, -0.022767284885048866, 0.25365447998046875, 0.06505510210990906, -0.02764585241675377, -0.08564010262489319, -0.0551060326397419, -0.006221585441380739, -0.04667239636182785, 0.14769990742206573, 0.11573854833841324, 0.010746140964329243, -0.004821090493351221, 0.06378000229597092, -0.12527789175510406, -0.0025096542667597532, -0.03415454551577568, 0.02226121537387371, 0.04579043388366699, -0.03502102568745613, -0.02889486588537693, 0.13220155239105225, -0.09282595664262772, -0.020799798890948296, -0.04498402029275894, -0.07562927901744843, -0.01852511242032051, -0.10240649431943893, -0.02861732803285122, 
-0.09569302201271057, 0.008436571806669235, -0.03158688172698021, 0.010410387068986893, 0.11812232434749603, 0.01837324909865856, 0.010974729433655739, 0.08283206075429916, -0.19832448661327362, -0.04405651241540909, 0.07327675819396973, -0.05011114105582237, -0.01837252452969551, -0.13303519785404205, -0.02974291890859604, 0.0072905453853309155, -0.0654696375131607, -0.031671490520238876, 0.09579192847013474, -0.02302209846675396, -0.022908756509423256, -0.17323151230812073, -0.06332312524318695, -0.02297144941985607, 0.05873504653573036, 0.05313771218061447, 0.16155245900154114, 0.09900229424238205, -0.0351589098572731, -0.002914066193625331, 0.19203364849090576, 0.035282038152217865, -0.16651128232479095, 0.022025782614946365, 0.026559051126241684, 0.008418492041528225, 0.03111990913748741, -0.01633688062429428, -0.14536811411380768, -0.03845015913248062, 0.05828402191400528, 0.38651737570762634, 0.028368519619107246, 0.020132316276431084, -0.09994959831237793, 0.031456589698791504, 0.04357022047042847, 0.057581353932619095, -0.05274792015552521, 0.14541031420230865, 0.03674635291099548, 0.04681246727705002, -0.014372825622558594, -0.0229983888566494, -0.1353490948677063, 0.06290198117494583, 0.07102818042039871, -0.10064653307199478, -0.059448711574077606, 0.24003662168979645, -0.14889413118362427, -0.011358099058270454, 0.06736477464437485, -0.1320396065711975, -0.12273560464382172, 0.029509540647268295, 0.07499150931835175, 0.0818362832069397, 0.01930757611989975, -0.02095561847090721, -0.04411185905337334, -0.15232495963573456, 0.03257792815566063, -0.2803584933280945, -0.12824411690235138, 0.048202045261859894, -0.02690073661506176, 0.13220837712287903, -0.04005570709705353, 0.20297320187091827, 0.06678558140993118, -0.025513924658298492, -0.07789117097854614, 0.07576555758714676, 0.05919405445456505, 0.18938186764717102, 0.06922761350870132, -0.04415951296687126, -0.03634575754404068, -0.08553796261548996, 0.1493697315454483, -0.019608210772275925, 
0.0004785281198564917, 0.06971906870603561, 0.012269689701497555, -0.13361521065235138, 0.04692523553967476, -0.049699436873197556, 0.058150071650743484, 0.12178003787994385, -0.05129953473806381, 0.027546554803848267, 0.02162451110780239, -0.017099058255553246, 0.08886930346488953, -0.10727820545434952, -0.06720130890607834, -0.08935336768627167, -0.10042057186365128, 0.02965139038860798, 0.07038546353578568, -0.12792156636714935, 0.05618426203727722, -0.14157359302043915, 0.03335569053888321, -0.04439123347401619, 0.07617458701133728, 0.10445071011781693, -0.07996063679456711, -0.04391647130250931, -0.2006022036075592, 0.04821702837944031, 0.03155623376369476, -0.07886034995317459, -0.040116939693689346 ]
c1fc1a0ad76b98f79a63cd4d6680b9610c41074e
# Dataset Card for Evaluation run of appvoid/palmer-002.5 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [appvoid/palmer-002.5](https://huggingface.co/appvoid/palmer-002.5) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_appvoid__palmer-002.5", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T15:49:21.576501](https://huggingface.co/datasets/open-llm-leaderboard/details_appvoid__palmer-002.5/blob/main/results_2024-01-25T15-49-21.576501.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.26026405988403956, "acc_stderr": 0.030773563546552134, "acc_norm": 0.26041347392018827, "acc_norm_stderr": 0.03152706572860613, "mc1": 0.2631578947368421, "mc1_stderr": 0.01541524174023701, "mc2": 0.4022295069003637, "mc2_stderr": 0.01452265399729067 }, "harness|arc:challenge|25": { "acc": 0.3575085324232082, "acc_stderr": 0.014005494275916571, "acc_norm": 0.37542662116040953, "acc_norm_stderr": 0.014150631435111728 }, "harness|hellaswag|10": { "acc": 0.4645488946425015, "acc_stderr": 0.004977223485342026, "acc_norm": 0.6184027086237801, "acc_norm_stderr": 0.00484785754695748 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.18, "acc_stderr": 0.038612291966536975, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536975 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.24444444444444444, "acc_stderr": 0.037125378336148665, "acc_norm": 0.24444444444444444, "acc_norm_stderr": 0.037125378336148665 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.19078947368421054, "acc_stderr": 0.031975658210324984, "acc_norm": 0.19078947368421054, "acc_norm_stderr": 0.031975658210324984 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.23, "acc_stderr": 0.042295258468165065, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.2679245283018868, "acc_stderr": 0.027257260322494845, "acc_norm": 0.2679245283018868, "acc_norm_stderr": 0.027257260322494845 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.25, "acc_stderr": 0.03621034121889507, "acc_norm": 0.25, "acc_norm_stderr": 0.03621034121889507 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 
0.04351941398892446 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.18497109826589594, "acc_stderr": 0.029605623981771214, "acc_norm": 0.18497109826589594, "acc_norm_stderr": 0.029605623981771214 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.042801058373643966, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.042801058373643966 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2297872340425532, "acc_stderr": 0.027501752944412424, "acc_norm": 0.2297872340425532, "acc_norm_stderr": 0.027501752944412424 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.17543859649122806, "acc_stderr": 0.0357795481394837, "acc_norm": 0.17543859649122806, "acc_norm_stderr": 0.0357795481394837 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.23448275862068965, "acc_stderr": 0.035306258743465914, "acc_norm": 0.23448275862068965, "acc_norm_stderr": 0.035306258743465914 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2566137566137566, "acc_stderr": 0.022494510767503154, "acc_norm": 0.2566137566137566, "acc_norm_stderr": 0.022494510767503154 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.15873015873015872, "acc_stderr": 0.032684540130117436, "acc_norm": 0.15873015873015872, "acc_norm_stderr": 0.032684540130117436 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.25161290322580643, "acc_stderr": 0.02468597928623997, "acc_norm": 0.25161290322580643, "acc_norm_stderr": 0.02468597928623997 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.23645320197044334, "acc_stderr": 0.029896114291733552, "acc_norm": 0.23645320197044334, "acc_norm_stderr": 0.029896114291733552 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2727272727272727, "acc_stderr": 0.03477691162163659, "acc_norm": 0.2727272727272727, "acc_norm_stderr": 0.03477691162163659 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.21212121212121213, "acc_stderr": 0.02912652283458682, "acc_norm": 0.21212121212121213, "acc_norm_stderr": 0.02912652283458682 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.24870466321243523, "acc_stderr": 0.031195840877700307, "acc_norm": 0.24870466321243523, "acc_norm_stderr": 0.031195840877700307 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2846153846153846, "acc_stderr": 0.022878322799706287, "acc_norm": 0.2846153846153846, "acc_norm_stderr": 0.022878322799706287 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.02684205787383371, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.02684205787383371 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.24789915966386555, "acc_stderr": 0.028047967224176892, "acc_norm": 0.24789915966386555, "acc_norm_stderr": 0.028047967224176892 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2052980132450331, "acc_stderr": 0.03297986648473836, "acc_norm": 0.2052980132450331, "acc_norm_stderr": 0.03297986648473836 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.24403669724770644, "acc_stderr": 0.01841528635141641, "acc_norm": 0.24403669724770644, "acc_norm_stderr": 0.01841528635141641 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.3611111111111111, "acc_stderr": 
0.032757734861009996, "acc_norm": 0.3611111111111111, "acc_norm_stderr": 0.032757734861009996 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.27450980392156865, "acc_stderr": 0.031321798030832904, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.031321798030832904 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.25316455696202533, "acc_stderr": 0.028304657943035303, "acc_norm": 0.25316455696202533, "acc_norm_stderr": 0.028304657943035303 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.36771300448430494, "acc_stderr": 0.03236198350928276, "acc_norm": 0.36771300448430494, "acc_norm_stderr": 0.03236198350928276 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2366412213740458, "acc_stderr": 0.0372767357559692, "acc_norm": 0.2366412213740458, "acc_norm_stderr": 0.0372767357559692 }, "harness|hendrycksTest-international_law|5": { "acc": 0.24793388429752067, "acc_stderr": 0.039418975265163025, "acc_norm": 0.24793388429752067, "acc_norm_stderr": 0.039418975265163025 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.24074074074074073, "acc_stderr": 0.04133119440243838, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.04133119440243838 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.26380368098159507, "acc_stderr": 0.034624199316156234, "acc_norm": 0.26380368098159507, "acc_norm_stderr": 0.034624199316156234 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04287858751340456, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04287858751340456 }, "harness|hendrycksTest-management|5": { "acc": 0.23300970873786409, "acc_stderr": 0.041858325989283164, "acc_norm": 0.23300970873786409, "acc_norm_stderr": 0.041858325989283164 }, "harness|hendrycksTest-marketing|5": { "acc": 0.27350427350427353, "acc_stderr": 0.029202540153431166, "acc_norm": 0.27350427350427353, "acc_norm_stderr": 0.029202540153431166 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.29, 
"acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2656449553001277, "acc_stderr": 0.015794302487888715, "acc_norm": 0.2656449553001277, "acc_norm_stderr": 0.015794302487888715 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.26011560693641617, "acc_stderr": 0.023618678310069356, "acc_norm": 0.26011560693641617, "acc_norm_stderr": 0.023618678310069356 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.27150837988826815, "acc_stderr": 0.01487425216809527, "acc_norm": 0.27150837988826815, "acc_norm_stderr": 0.01487425216809527 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.25163398692810457, "acc_stderr": 0.024848018263875195, "acc_norm": 0.25163398692810457, "acc_norm_stderr": 0.024848018263875195 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2765273311897106, "acc_stderr": 0.02540383297817961, "acc_norm": 0.2765273311897106, "acc_norm_stderr": 0.02540383297817961 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2623456790123457, "acc_stderr": 0.02447722285613511, "acc_norm": 0.2623456790123457, "acc_norm_stderr": 0.02447722285613511 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.25177304964539005, "acc_stderr": 0.0258921511567094, "acc_norm": 0.25177304964539005, "acc_norm_stderr": 0.0258921511567094 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.24445893089960888, "acc_stderr": 0.010976425013113909, "acc_norm": 0.24445893089960888, "acc_norm_stderr": 0.010976425013113909 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.2426470588235294, "acc_stderr": 0.02604066247420126, "acc_norm": 0.2426470588235294, "acc_norm_stderr": 0.02604066247420126 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.2679738562091503, "acc_stderr": 0.017917974069594722, "acc_norm": 0.2679738562091503, "acc_norm_stderr": 0.017917974069594722 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.3090909090909091, 
"acc_stderr": 0.044262946482000985, "acc_norm": 0.3090909090909091, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.14285714285714285, "acc_stderr": 0.02240178743525639, "acc_norm": 0.14285714285714285, "acc_norm_stderr": 0.02240178743525639 }, "harness|hendrycksTest-sociology|5": { "acc": 0.2537313432835821, "acc_stderr": 0.03076944496729602, "acc_norm": 0.2537313432835821, "acc_norm_stderr": 0.03076944496729602 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-virology|5": { "acc": 0.3313253012048193, "acc_stderr": 0.03664314777288087, "acc_norm": 0.3313253012048193, "acc_norm_stderr": 0.03664314777288087 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.21637426900584794, "acc_stderr": 0.03158149539338734, "acc_norm": 0.21637426900584794, "acc_norm_stderr": 0.03158149539338734 }, "harness|truthfulqa:mc|0": { "mc1": 0.2631578947368421, "mc1_stderr": 0.01541524174023701, "mc2": 0.4022295069003637, "mc2_stderr": 0.01452265399729067 }, "harness|winogrande|5": { "acc": 0.6637726913970008, "acc_stderr": 0.013277286593993447 }, "harness|gsm8k|5": { "acc": 0.019711902956785442, "acc_stderr": 0.0038289829787356987 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_appvoid__palmer-002.5
[ "region:us" ]
2024-01-25T15:51:10+00:00
{"pretty_name": "Evaluation run of appvoid/palmer-002.5", "dataset_summary": "Dataset automatically created during the evaluation run of model [appvoid/palmer-002.5](https://huggingface.co/appvoid/palmer-002.5) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_appvoid__palmer-002.5\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T15:49:21.576501](https://huggingface.co/datasets/open-llm-leaderboard/details_appvoid__palmer-002.5/blob/main/results_2024-01-25T15-49-21.576501.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.26026405988403956,\n \"acc_stderr\": 0.030773563546552134,\n \"acc_norm\": 0.26041347392018827,\n \"acc_norm_stderr\": 0.03152706572860613,\n \"mc1\": 0.2631578947368421,\n \"mc1_stderr\": 0.01541524174023701,\n \"mc2\": 0.4022295069003637,\n \"mc2_stderr\": 0.01452265399729067\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.3575085324232082,\n \"acc_stderr\": 0.014005494275916571,\n \"acc_norm\": 0.37542662116040953,\n \"acc_norm_stderr\": 0.014150631435111728\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4645488946425015,\n \"acc_stderr\": 0.004977223485342026,\n \"acc_norm\": 0.6184027086237801,\n \"acc_norm_stderr\": 0.00484785754695748\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536975,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536975\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.24444444444444444,\n \"acc_stderr\": 0.037125378336148665,\n \"acc_norm\": 0.24444444444444444,\n \"acc_norm_stderr\": 0.037125378336148665\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.19078947368421054,\n \"acc_stderr\": 0.031975658210324984,\n \"acc_norm\": 0.19078947368421054,\n \"acc_norm_stderr\": 0.031975658210324984\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.2679245283018868,\n \"acc_stderr\": 0.027257260322494845,\n \"acc_norm\": 0.2679245283018868,\n \"acc_norm_stderr\": 0.027257260322494845\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03621034121889507\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 
0.0446196043338474,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.18497109826589594,\n \"acc_stderr\": 0.029605623981771214,\n \"acc_norm\": 0.18497109826589594,\n \"acc_norm_stderr\": 0.029605623981771214\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.042801058373643966,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.042801058373643966\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2297872340425532,\n \"acc_stderr\": 0.027501752944412424,\n \"acc_norm\": 0.2297872340425532,\n \"acc_norm_stderr\": 0.027501752944412424\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.17543859649122806,\n \"acc_stderr\": 0.0357795481394837,\n \"acc_norm\": 0.17543859649122806,\n \"acc_norm_stderr\": 0.0357795481394837\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.23448275862068965,\n \"acc_stderr\": 0.035306258743465914,\n \"acc_norm\": 0.23448275862068965,\n \"acc_norm_stderr\": 0.035306258743465914\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2566137566137566,\n \"acc_stderr\": 0.022494510767503154,\n \"acc_norm\": 0.2566137566137566,\n \"acc_norm_stderr\": 0.022494510767503154\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.15873015873015872,\n \"acc_stderr\": 0.032684540130117436,\n 
\"acc_norm\": 0.15873015873015872,\n \"acc_norm_stderr\": 0.032684540130117436\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.25161290322580643,\n \"acc_stderr\": 0.02468597928623997,\n \"acc_norm\": 0.25161290322580643,\n \"acc_norm_stderr\": 0.02468597928623997\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.23645320197044334,\n \"acc_stderr\": 0.029896114291733552,\n \"acc_norm\": 0.23645320197044334,\n \"acc_norm_stderr\": 0.029896114291733552\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.2727272727272727,\n \"acc_stderr\": 0.03477691162163659,\n \"acc_norm\": 0.2727272727272727,\n \"acc_norm_stderr\": 0.03477691162163659\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.21212121212121213,\n \"acc_stderr\": 0.02912652283458682,\n \"acc_norm\": 0.21212121212121213,\n \"acc_norm_stderr\": 0.02912652283458682\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.24870466321243523,\n \"acc_stderr\": 0.031195840877700307,\n \"acc_norm\": 0.24870466321243523,\n \"acc_norm_stderr\": 0.031195840877700307\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.2846153846153846,\n \"acc_stderr\": 0.022878322799706287,\n \"acc_norm\": 0.2846153846153846,\n \"acc_norm_stderr\": 0.022878322799706287\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26296296296296295,\n \"acc_stderr\": 0.02684205787383371,\n \"acc_norm\": 0.26296296296296295,\n \"acc_norm_stderr\": 0.02684205787383371\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.24789915966386555,\n \"acc_stderr\": 0.028047967224176892,\n \"acc_norm\": 0.24789915966386555,\n \"acc_norm_stderr\": 0.028047967224176892\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2052980132450331,\n \"acc_stderr\": 0.03297986648473836,\n \"acc_norm\": 0.2052980132450331,\n \"acc_norm_stderr\": 0.03297986648473836\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.24403669724770644,\n \"acc_stderr\": 0.01841528635141641,\n \"acc_norm\": 0.24403669724770644,\n \"acc_norm_stderr\": 0.01841528635141641\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.3611111111111111,\n \"acc_stderr\": 0.032757734861009996,\n \"acc_norm\": 0.3611111111111111,\n \"acc_norm_stderr\": 0.032757734861009996\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.27450980392156865,\n \"acc_stderr\": 0.031321798030832904,\n \"acc_norm\": 0.27450980392156865,\n \"acc_norm_stderr\": 0.031321798030832904\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.25316455696202533,\n \"acc_stderr\": 0.028304657943035303,\n \"acc_norm\": 0.25316455696202533,\n \"acc_norm_stderr\": 0.028304657943035303\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.36771300448430494,\n \"acc_stderr\": 0.03236198350928276,\n \"acc_norm\": 0.36771300448430494,\n \"acc_norm_stderr\": 0.03236198350928276\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2366412213740458,\n \"acc_stderr\": 0.0372767357559692,\n \"acc_norm\": 0.2366412213740458,\n \"acc_norm_stderr\": 0.0372767357559692\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.24793388429752067,\n \"acc_stderr\": 0.039418975265163025,\n \"acc_norm\": 0.24793388429752067,\n \"acc_norm_stderr\": 0.039418975265163025\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.24074074074074073,\n \"acc_stderr\": 
0.04133119440243838,\n \"acc_norm\": 0.24074074074074073,\n \"acc_norm_stderr\": 0.04133119440243838\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.26380368098159507,\n \"acc_stderr\": 0.034624199316156234,\n \"acc_norm\": 0.26380368098159507,\n \"acc_norm_stderr\": 0.034624199316156234\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.04287858751340456,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.04287858751340456\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.23300970873786409,\n \"acc_stderr\": 0.041858325989283164,\n \"acc_norm\": 0.23300970873786409,\n \"acc_norm_stderr\": 0.041858325989283164\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.27350427350427353,\n \"acc_stderr\": 0.029202540153431166,\n \"acc_norm\": 0.27350427350427353,\n \"acc_norm_stderr\": 0.029202540153431166\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2656449553001277,\n \"acc_stderr\": 0.015794302487888715,\n \"acc_norm\": 0.2656449553001277,\n \"acc_norm_stderr\": 0.015794302487888715\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.26011560693641617,\n \"acc_stderr\": 0.023618678310069356,\n \"acc_norm\": 0.26011560693641617,\n \"acc_norm_stderr\": 0.023618678310069356\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.27150837988826815,\n \"acc_stderr\": 0.01487425216809527,\n \"acc_norm\": 0.27150837988826815,\n \"acc_norm_stderr\": 0.01487425216809527\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.25163398692810457,\n \"acc_stderr\": 0.024848018263875195,\n \"acc_norm\": 0.25163398692810457,\n \"acc_norm_stderr\": 0.024848018263875195\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2765273311897106,\n 
\"acc_stderr\": 0.02540383297817961,\n \"acc_norm\": 0.2765273311897106,\n \"acc_norm_stderr\": 0.02540383297817961\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.2623456790123457,\n \"acc_stderr\": 0.02447722285613511,\n \"acc_norm\": 0.2623456790123457,\n \"acc_norm_stderr\": 0.02447722285613511\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.25177304964539005,\n \"acc_stderr\": 0.0258921511567094,\n \"acc_norm\": 0.25177304964539005,\n \"acc_norm_stderr\": 0.0258921511567094\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.24445893089960888,\n \"acc_stderr\": 0.010976425013113909,\n \"acc_norm\": 0.24445893089960888,\n \"acc_norm_stderr\": 0.010976425013113909\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.2426470588235294,\n \"acc_stderr\": 0.02604066247420126,\n \"acc_norm\": 0.2426470588235294,\n \"acc_norm_stderr\": 0.02604066247420126\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.2679738562091503,\n \"acc_stderr\": 0.017917974069594722,\n \"acc_norm\": 0.2679738562091503,\n \"acc_norm_stderr\": 0.017917974069594722\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.3090909090909091,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.3090909090909091,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.14285714285714285,\n \"acc_stderr\": 0.02240178743525639,\n \"acc_norm\": 0.14285714285714285,\n \"acc_norm_stderr\": 0.02240178743525639\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.2537313432835821,\n \"acc_stderr\": 0.03076944496729602,\n \"acc_norm\": 0.2537313432835821,\n \"acc_norm_stderr\": 0.03076944496729602\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 
0.3313253012048193,\n \"acc_stderr\": 0.03664314777288087,\n \"acc_norm\": 0.3313253012048193,\n \"acc_norm_stderr\": 0.03664314777288087\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.21637426900584794,\n \"acc_stderr\": 0.03158149539338734,\n \"acc_norm\": 0.21637426900584794,\n \"acc_norm_stderr\": 0.03158149539338734\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2631578947368421,\n \"mc1_stderr\": 0.01541524174023701,\n \"mc2\": 0.4022295069003637,\n \"mc2_stderr\": 0.01452265399729067\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6637726913970008,\n \"acc_stderr\": 0.013277286593993447\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.019711902956785442,\n \"acc_stderr\": 0.0038289829787356987\n }\n}\n```", "repo_url": "https://huggingface.co/appvoid/palmer-002.5", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|arc:challenge|25_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|gsm8k|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hellaswag|10_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T15-49-21.576501.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T15-49-21.576501.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T15-49-21.576501.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T15-49-21.576501.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T15-49-21.576501.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T15-49-21.576501.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T15-49-21.576501.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": 
"2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": 
"2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T15-49-21.576501.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["**/details_harness|winogrande|5_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T15-49-21.576501.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T15_49_21.576501", "path": ["results_2024-01-25T15-49-21.576501.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T15-49-21.576501.parquet"]}]}]}
2024-01-25T15:51:37+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of appvoid/palmer-002.5 Dataset automatically created during the evaluation run of model appvoid/palmer-002.5 on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T15:49:21.576501(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of appvoid/palmer-002.5\n\n\n\nDataset automatically created during the evaluation run of model appvoid/palmer-002.5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T15:49:21.576501(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of appvoid/palmer-002.5\n\n\n\nDataset automatically created during the evaluation run of model appvoid/palmer-002.5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T15:49:21.576501(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 177, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of appvoid/palmer-002.5\n\n\n\nDataset automatically created during the evaluation run of model appvoid/palmer-002.5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T15:49:21.576501(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.04490723833441734, 0.18586532771587372, -0.006726647727191448, 0.03456798940896988, 0.06722073256969452, -0.004304335452616215, 0.038866542279720306, 0.10731539875268936, 0.03989645838737488, 0.16214293241500854, -0.005996866151690483, 0.10404123365879059, 0.096652090549469, 0.10940170288085938, 0.021454010158777237, -0.14088411629199982, 0.05201258137822151, -0.09122249484062195, 0.09809776395559311, 0.07245562970638275, 0.0652056634426117, -0.07940352708101273, 0.06141902133822441, -0.03152047097682953, 0.02877182513475418, -0.007008072920143604, -0.056499745696783066, -0.042136941105127335, 0.10442687571048737, 0.06848696619272232, 0.04475861415266991, -0.027713768184185028, 0.029749978333711624, -0.2801903784275055, 0.018336676061153412, 0.10517752170562744, 0.004555693827569485, 0.04824373126029968, 0.14200834929943085, -0.08086145669221878, 0.11385160684585571, -0.02892165631055832, 0.07653409242630005, 0.042676232755184174, -0.12153586000204086, -0.15278026461601257, -0.14231561124324799, 0.017911208793520927, 0.0821298211812973, 0.042428262531757355, -0.016770511865615845, 0.09801506996154785, -0.04845511540770531, 0.0542740635573864, 0.13034051656723022, -0.1747671514749527, -0.027358446270227432, 0.047050900757312775, 0.020606771111488342, 0.05546003580093384, -0.0856655165553093, -0.031064065173268318, 0.02528330497443676, 0.05354153737425804, 0.003188435221090913, 0.005418417975306511, 0.03646896779537201, 0.02230062149465084, -0.14407959580421448, -0.13288022577762604, 0.1027064099907875, -0.01271437481045723, -0.04371954873204231, -0.1630578339099884, -0.05929341912269592, -0.015552940778434277, 0.008809187449514866, 0.017992384731769562, 0.014633730053901672, 0.0027498078998178244, 0.07192347943782806, 0.00046866360935382545, -0.08668051660060883, -0.03439630940556526, -0.039433058351278305, 0.03157899156212807, 0.03490018472075462, 0.0036722389049828053, -0.006462583784013987, 0.14015154540538788, 0.03659437596797943, -0.0633835718035698, 
-0.08625884354114532, -0.04231521487236023, -0.13585123419761658, -0.03285735845565796, 0.030500264838337898, -0.07063835859298706, 0.04532410576939583, 0.24672289192676544, -0.04093218222260475, 0.03169165551662445, -0.09229426085948944, 0.015905657783150673, 0.11367584764957428, 0.09901502728462219, -0.05851636454463005, -0.08620736747980118, -0.05616028979420662, 0.017261669039726257, 0.030294889584183693, -0.02898596227169037, 0.029023202136158943, 0.07545562088489532, 0.02101762220263481, 0.13387952744960785, 0.12093227356672287, 0.021662529557943344, -0.07042641192674637, -0.013302103616297245, 0.1420287787914276, -0.169989675283432, 0.010116762481629848, 0.02034379541873932, -0.027625564485788345, -0.07168622314929962, 0.06386639177799225, -0.021449968218803406, -0.06829123198986053, 0.12217652797698975, -0.060400206595659256, -0.06577408313751221, -0.10207441449165344, -0.06817159056663513, 0.03797372058033943, -0.025641096755862236, -0.06481077522039413, -0.05837219953536987, -0.15017196536064148, -0.08577421307563782, 0.03498218581080437, -0.06566280126571655, -0.007455111481249332, 0.00002184557706641499, 0.01723630726337433, -0.014658210799098015, -0.013783703558146954, 0.1100446805357933, -0.07706344872713089, 0.037889618426561356, -0.05037103220820427, 0.0392741933465004, 0.10688658058643341, 0.030795305967330933, -0.12695984542369843, 0.09686153382062912, -0.08355039358139038, 0.09563301503658295, -0.08641993254423141, -0.02391122840344906, -0.11609286069869995, 0.016029244288802147, -0.013783184811472893, 0.01710348390042782, -0.01339761447161436, 0.0800413265824318, -0.20535291731357574, -0.011923028156161308, 0.17679303884506226, -0.1259072870016098, -0.062109485268592834, 0.06799785792827606, -0.02357226237654686, 0.058329273015260696, 0.04543053358793259, 0.08332467079162598, 0.0919489935040474, -0.08519357442855835, -0.10321036726236343, -0.06060321629047394, -0.03801073133945465, 0.14936092495918274, 0.05090714246034622, -0.08679250627756119, 
0.08796186000108719, 0.034067049622535706, -0.001024648197926581, -0.04686124995350838, -0.009138801135122776, -0.05538713559508324, -0.0027670308481901884, -0.024555103853344917, -0.07481885701417923, -0.02260763943195343, -0.09107095748186111, -0.00046977782039903104, -0.06161141023039818, -0.0036522711161524057, 0.09794525802135468, -0.02258314937353134, 0.02914169244468212, -0.08655218034982681, 0.07209432870149612, -0.014429673552513123, 0.02071472257375717, -0.2100878655910492, -0.08540622144937515, 0.038409627974033356, -0.17965641617774963, 0.03690074384212494, 0.004804661497473717, 0.017062455415725708, 0.05144815146923065, 0.004245101939886808, 0.006773907691240311, 0.018371904268860817, -0.012849748134613037, -0.003465421497821808, -0.14766258001327515, -0.037793029099702835, -0.07018472999334335, 0.06478897482156754, -0.10277082026004791, -0.02432163618505001, 0.07392561435699463, 0.1583799123764038, 0.021777240559458733, -0.07925110310316086, 0.03668684884905815, 0.022514617070555687, -0.05511102080345154, -0.058038581162691116, 0.005496043246239424, -0.004326984286308289, 0.040736399590969086, 0.0730300322175026, -0.17888499796390533, -0.15150509774684906, 0.06831265240907669, 0.1336197853088379, -0.06964290887117386, -0.06457139551639557, -0.07119834423065186, -0.05610855668783188, -0.10453934222459793, -0.04898103326559067, 0.10307836532592773, 0.08994205296039581, 0.05495727062225342, -0.06203240901231766, -0.05801752582192421, -0.014098587445914745, 0.04007802531123161, -0.06607019156217575, 0.10757678002119064, 0.09862968325614929, -0.07763553410768509, 0.09998198598623276, -0.014605125412344933, 0.10968682914972305, 0.11184268444776535, 0.004990618210285902, -0.11830462515354156, -0.01661037839949131, 0.0692858174443245, 0.04283145070075989, 0.07786642014980316, -0.023439999669790268, 0.04532773792743683, 0.0826621800661087, 0.002487489487975836, 0.03439546376466751, -0.07993727177381516, 0.03533991798758507, 0.031444668769836426, 
-0.008901669643819332, 0.011369521729648113, 0.0033374554477632046, 0.03383663669228554, 0.09036323428153992, 0.00868805032223463, 0.06572866439819336, -0.026980377733707428, -0.04933600872755051, -0.08519855886697769, 0.1398308128118515, -0.08982707560062408, -0.22915509343147278, -0.16820171475410461, -0.049268938601017, -0.031158016994595528, -0.0166727714240551, 0.04773994907736778, 0.009485782124102116, -0.10459353029727936, -0.1091834232211113, 0.054056670516729355, 0.04390230402350426, -0.11826545745134354, -0.03184034302830696, 0.019963761791586876, -0.010747703723609447, -0.16347679495811462, 0.021091628819704056, 0.034024886786937714, -0.06784553825855255, 0.021230097860097885, 0.0913998931646347, 0.12395454198122025, 0.1109527051448822, 0.0789918601512909, -0.018391607329249382, -0.009547250345349312, 0.15611882507801056, -0.11581733822822571, 0.031498756259679794, 0.09435740858316422, -0.03815269470214844, 0.07975633442401886, 0.14933983981609344, 0.001986913848668337, -0.08010992407798767, 0.0394010916352272, 0.09726943075656891, -0.06534888595342636, -0.26425686478614807, -0.07921918481588364, -0.02876168116927147, 0.06858418136835098, 0.10762880742549896, 0.08204106986522675, -0.016611622646450996, -0.001956735970452428, -0.10338509827852249, -0.04493001475930214, -0.028170857578516006, 0.06736471503973007, 0.04813466593623161, -0.011984635144472122, 0.045651908963918686, -0.05477907508611679, 0.026387691497802734, 0.12348458170890808, 0.031074287369847298, 0.18006519973278046, -0.04526834189891815, 0.17566163837909698, 0.10019808262586594, 0.0688321515917778, -0.02182517573237419, 0.07704068720340729, -0.015536235645413399, 0.07343901693820953, -0.007836582139134407, -0.08631875365972519, -0.03257044404745102, 0.09295704215765, 0.049807582050561905, -0.031181707978248596, 0.0590314082801342, -0.054464951157569885, 0.06172452121973038, 0.26897290349006653, 0.007327436935156584, -0.1419946402311325, -0.029275665059685707, 0.05152132362127304, 
-0.05122295022010803, -0.09975975751876831, 0.0076537057757377625, 0.08548804372549057, -0.15970440208911896, 0.03581738471984863, -0.040545862168073654, 0.07404733449220657, -0.15239007771015167, -0.03421174734830856, -0.007575242314487696, 0.04976264014840126, -0.023402612656354904, 0.09485732764005661, -0.1631796509027481, 0.08599059283733368, -0.017640013247728348, 0.023897727951407433, -0.06454688310623169, 0.0740978941321373, -0.016288891434669495, -0.07261466234922409, 0.14723865687847137, 0.001128641190007329, -0.11115672439336777, -0.06293446570634842, -0.12355997413396835, -0.015090195462107658, 0.03569141775369644, -0.10799748450517654, 0.11756303161382675, 0.007764098234474659, -0.023169131949543953, -0.05100275203585625, -0.021622149273753166, -0.06503824144601822, -0.21099035441875458, 0.08600138872861862, -0.12012430280447006, 0.05277831479907036, -0.05466948449611664, -0.03552484139800072, -0.0470159538090229, 0.1479700654745102, -0.13555443286895752, -0.06974706053733826, -0.1043398454785347, -0.04177720472216606, 0.13829663395881653, -0.066766656935215, 0.05291682854294777, -0.04698685556650162, 0.16190937161445618, -0.03717939928174019, -0.051272887736558914, 0.014217869378626347, -0.06601013243198395, -0.16725322604179382, -0.0432160459458828, 0.11765497922897339, 0.06102384254336357, 0.008991874754428864, -0.012482878752052784, 0.060734305530786514, 0.007179137784987688, -0.09622911363840103, 0.04142768681049347, 0.13421852886676788, 0.09669851511716843, 0.06593003869056702, -0.029319051653146744, -0.08379698544740677, -0.08882597088813782, -0.08137118816375732, 0.06326296925544739, 0.18011489510536194, -0.05159695819020271, 0.1421975940465927, 0.12578506767749786, -0.11769330501556396, -0.20484425127506256, -0.09146854281425476, -0.01727203279733658, -0.01458398625254631, 0.10335007309913635, -0.19296972453594208, 0.041729673743247986, 0.0850374698638916, -0.02407192811369896, 0.12425753474235535, -0.28065526485443115, -0.1335877776145935, 
0.051280148327350616, 0.042324893176555634, -0.17079457640647888, -0.13186252117156982, -0.09242118895053864, -0.010187070816755295, -0.1329384595155716, 0.11278035491704941, -0.01268574595451355, 0.04054220765829086, -0.025320179760456085, 0.051982332020998, 0.03878352791070938, -0.06492727249860764, 0.13165442645549774, -0.01821163482964039, 0.04051840677857399, -0.08846660703420639, -0.011538186110556126, -0.020538995042443275, -0.04937059432268143, 0.07391496002674103, 0.021645227447152138, 0.0322900153696537, -0.06681329756975174, -0.03907473012804985, -0.04675714299082756, 0.03215676173567772, -0.06730754673480988, -0.06250424683094025, -0.05827832594513893, 0.07556372135877609, 0.07855379581451416, -0.011754375882446766, 0.03662540763616562, -0.0438515730202198, 0.054346714168787, 0.22508804500102997, 0.05798983946442604, 0.04446415975689888, -0.12354651093482971, -0.046989474445581436, -0.006814503110945225, 0.006590105127543211, -0.09179721027612686, 0.048669785261154175, 0.0940895825624466, 0.04201579466462135, 0.1153288334608078, -0.011038001626729965, -0.19769389927387238, -0.0027707305271178484, 0.07756669819355011, -0.11051055788993835, -0.20291423797607422, 0.0462532639503479, 0.0359044149518013, -0.10779763013124466, -0.08944597095251083, 0.0910661593079567, 0.028445661067962646, -0.014401989988982677, 0.013272414915263653, 0.07826492190361023, 0.04027209058403969, 0.09075047820806503, -0.03750769793987274, 0.04328456521034241, -0.07506749033927917, 0.13096335530281067, 0.14748834073543549, -0.11986752599477768, -0.010089239105582237, 0.06912636011838913, -0.042621273547410965, -0.05430029332637787, -0.04736199975013733, 0.061100445687770844, 0.002283093985170126, -0.04314702749252319, -0.014729552902281284, -0.07006888836622238, 0.08462759852409363, 0.135823056101799, -0.010256304405629635, 0.07980353385210037, 0.013087260536849499, -0.00548460241407156, -0.04600519686937332, 0.11554304510354996, 0.03857358917593956, 0.043179918080568314, 
-0.02635347470641136, 0.0313895083963871, 0.01713014580309391, -0.01715678721666336, 0.020351223647594452, -0.06802815198898315, -0.05730278044939041, 0.0169281717389822, -0.17509029805660248, 0.02796010673046112, -0.08392883837223053, -0.009450157172977924, 0.008731561712920666, 0.012491852976381779, -0.0015853344229981303, 0.015637537464499474, -0.05448906496167183, -0.05201851204037666, -0.047464534640312195, 0.11556512862443924, -0.20326180756092072, -0.0021019168198108673, 0.08497415482997894, -0.08505351841449738, 0.07544434070587158, -0.005962709430605173, -0.010559414513409138, 0.010584529489278793, -0.08543489873409271, -0.006448307074606419, -0.01715463399887085, 0.04309321939945221, 0.017660705372691154, -0.15117809176445007, -0.013580380007624626, -0.0009486221824772656, -0.10044238716363907, -0.0027699745260179043, -0.0045001087710261345, -0.1459863781929016, 0.07607482373714447, 0.09886960685253143, -0.049143292009830475, -0.03560848906636238, 0.02465759590268135, 0.030510885640978813, 0.019753610715270042, 0.09940078854560852, -0.028320370241999626, 0.03966580703854561, -0.15382497012615204, -0.03700384125113487, 0.009775682352483273, 0.008986270055174828, 0.052081916481256485, 0.0028249770402908325, 0.02854575403034687, -0.016112206503748894, 0.21713902056217194, -0.019424738362431526, -0.005295713432133198, 0.027151063084602356, -0.0007648731698282063, -0.061365507543087006, 0.02762591280043125, -0.0233471617102623, 0.006438534706830978, 0.013749413192272186, 0.010252255946397781, -0.02641063928604126, -0.0480661503970623, 0.02145320363342762, 0.10279207676649094, 0.11018006503582001, 0.22264748811721802, -0.03372511267662048, 0.03635047376155853, -0.13886931538581848, -0.07673180103302002, 0.0009469183278270066, -0.075022391974926, 0.05981858819723129, -0.056056756526231766, 0.06533344089984894, 0.10649541765451431, -0.13944102823734283, 0.1341216266155243, -0.03433037921786308, -0.023856457322835922, -0.05848146229982376, -0.1941494643688202, 
-0.033828239887952805, 0.018884949386119843, 0.004404175560921431, -0.08779972791671753, 0.11079783737659454, 0.13170772790908813, 0.01270371675491333, -0.0052386061288416386, 0.064600370824337, -0.0995696485042572, -0.05733099952340126, -0.026102673262357712, 0.02808605134487152, 0.03590073063969612, 0.01576129160821438, 0.060446906834840775, -0.0026820253115147352, 0.05483981594443321, 0.08097153156995773, 0.09853138029575348, 0.06285611540079117, 0.04521474614739418, -0.026006514206528664, -0.03783416748046875, 0.0036177339497953653, -0.02279846929013729, -0.06206376850605011, 0.18137289583683014, 0.07150847464799881, 0.031173599883913994, 0.021770065650343895, 0.20824052393436432, -0.01829431764781475, -0.07045414298772812, -0.13622085750102997, 0.16797815263271332, -0.003761734114959836, 0.02706966921687126, 0.033418383449316025, -0.11710536479949951, -0.004432963207364082, 0.14653423428535461, 0.08347824960947037, 0.02074437402188778, 0.01057536993175745, 0.03752889111638069, 0.02385914884507656, -0.02678924985229969, 0.033785562962293625, 0.04112309589982033, 0.2233310490846634, -0.048612892627716064, 0.08212605863809586, -0.03333835303783417, -0.01649290882050991, -0.03304918110370636, 0.11890964955091476, -0.05821982026100159, 0.017316799610853195, -0.07234743237495422, 0.06679989397525787, -0.06711750477552414, -0.23527058959007263, 0.011130853556096554, -0.05935393646359444, -0.1361977905035019, -0.0023819354828447104, 0.048837002366781235, -0.029528658837080002, 0.04113084450364113, 0.041066281497478485, -0.02756892703473568, 0.18962091207504272, 0.01319353748112917, -0.06370081752538681, -0.08558958768844604, 0.06359270215034485, -0.06829769909381866, 0.27889254689216614, 0.004924592562019825, 0.01781376264989376, 0.08059179782867432, -0.0075190141797065735, -0.13117128610610962, 0.052320919930934906, 0.09029030054807663, -0.06982538849115372, 0.03469260409474373, 0.1337117701768875, -0.014117247425019741, 0.13351908326148987, 0.049439724534749985, 
0.011671293526887894, 0.07004320621490479, 0.05629867687821388, 0.02333383448421955, -0.081198550760746, 0.047101929783821106, -0.07884825766086578, 0.11527945846319199, 0.13066361844539642, -0.008038656786084175, 0.018959470093250275, -0.05884454771876335, 0.048522669821977615, -0.05510098859667778, 0.10731580853462219, -0.008152547292411327, -0.12534090876579285, 0.058857616037130356, 0.02228051982820034, 0.0793236494064331, -0.19311797618865967, -0.07514392584562302, 0.09342949092388153, -0.05708364024758339, -0.022008979693055153, 0.09797979891300201, 0.03047320805490017, 0.02687540464103222, -0.0498785637319088, -0.11618677526712418, 0.032066650688648224, 0.10033618658781052, -0.058154232800006866, -0.04159710183739662 ]
eb6a63a264128ced810956ce739745378280a94b
Backup of the releases of https://github.com/deepinsight/insightface due to the following issues: - https://github.com/deepinsight/insightface/issues/1896 - https://github.com/InstantID/InstantID/issues/60
jeantimex/insightface-backup
[ "license:mit", "region:us" ]
2024-01-25T16:24:23+00:00
{"license": "mit"}
2024-01-25T16:40:07+00:00
[]
[]
TAGS #license-mit #region-us
Backup of the releases of URL due to the following issues: - URL - URL
[]
[ "TAGS\n#license-mit #region-us \n" ]
[ 11 ]
[ "passage: TAGS\n#license-mit #region-us \n" ]
[ 0.026221778243780136, -0.033018264919519424, -0.008281232789158821, -0.05295303836464882, 0.052470896393060684, 0.06768012046813965, 0.1598525494337082, 0.04655371606349945, 0.23683255910873413, -0.05407243221998215, 0.11752297729253769, 0.08923697471618652, 0.004284696187824011, -0.0009730930323712528, 0.014216204173862934, -0.17134642601013184, 0.04864625632762909, -0.02878100797533989, 0.08764812350273132, 0.032233644276857376, -0.006205103360116482, -0.03845774009823799, -0.0022142508532851934, -0.03178790956735611, -0.057939812541007996, 0.03869890421628952, 0.045729056000709534, -0.02754949778318405, 0.14189864695072174, -0.021783310920000076, 0.13335508108139038, 0.046146418899297714, -0.011738095432519913, -0.2486042082309723, 0.008575023151934147, -0.07252951711416245, -0.11333522200584412, 0.016201216727495193, 0.035761721432209015, -0.010069100186228752, 0.032174937427043915, 0.11049123108386993, -0.011680051684379578, 0.06288356333971024, -0.2015703022480011, -0.20486389100551605, -0.07508610188961029, -0.07555478066205978, 0.0589042492210865, 0.030872387811541557, 0.05628744140267372, 0.1426718831062317, -0.18022038042545319, -0.0018841808196157217, 0.04129622131586075, -0.3510737717151642, 0.09011197835206985, 0.19666501879692078, 0.06407395005226135, 0.07872317731380463, -0.04774639382958412, 0.06726468354463577, 0.07745297998189926, -0.02402484230697155, -0.10679105669260025, -0.06142130121588707, 0.040939174592494965, 0.15604156255722046, -0.03852643445134163, -0.10356393456459045, 0.2591084837913513, -0.023262828588485718, -0.04234466329216957, 0.08201269060373306, -0.02980397455394268, -0.040379155427217484, 0.04404358193278313, 0.044016025960445404, 0.036236923187971115, 0.182089164853096, 0.1260262131690979, -0.03375067934393883, -0.16269677877426147, -0.030629513785243034, -0.2528207004070282, 0.07418664544820786, -0.003647059667855501, 0.10666298121213913, -0.20037521421909332, 0.03286786004900932, -0.15483668446540833, 
-0.009493621066212654, -0.02952384203672409, -0.059835705906152725, 0.05229754373431206, -0.0237403754144907, -0.04600388556718826, 0.07238677144050598, 0.08390641957521439, 0.2046167105436325, 0.023024363443255424, 0.016697337850928307, -0.10405295342206955, 0.15052515268325806, 0.019140364602208138, 0.024860305711627007, 0.179348424077034, 0.07677878439426422, -0.04891882464289665, -0.2251969277858734, 0.027894439175724983, -0.03671982139348984, -0.1441805064678192, 0.015881337225437164, -0.1542915552854538, 0.1736440360546112, -0.04078168794512749, -0.06919530034065247, -0.08578147739171982, 0.09790384024381638, 0.07768166810274124, -0.021921472623944283, -0.023105677217245102, -0.01381723117083311, 0.03522264584898949, -0.048196230083703995, -0.11687057465314865, 0.018241960555315018, 0.11869648098945618, 0.12573401629924774, -0.1483907401561737, -0.008189842104911804, -0.017200417816638947, 0.019065292552113533, 0.09696817398071289, -0.112403005361557, 0.028845038264989853, -0.09672309458255768, -0.13033071160316467, 0.036653537303209305, 0.017736904323101044, -0.019008556380867958, 0.1340927630662918, 0.061849117279052734, 0.056560322642326355, -0.011025321669876575, -0.07250872999429703, -0.14035539329051971, -0.08679798245429993, 0.1058693379163742, -0.046787332743406296, 0.010320915840566158, -0.24556252360343933, -0.014234079979360104, -0.14995723962783813, 0.059662189334630966, -0.0037668521981686354, -0.08819212019443512, -0.07740068435668945, 0.21408265829086304, 0.0018596589798107743, 0.04301392287015915, -0.1078512966632843, 0.054903753101825714, -0.06764797121286392, 0.10065380483865738, -0.12895582616329193, -0.06441528350114822, 0.1613781899213791, -0.13135331869125366, -0.14002031087875366, 0.0033312994055449963, -0.009472889825701714, 0.12053907662630081, 0.0802001804113388, 0.44566696882247925, -0.058881040662527084, -0.16201181709766388, 0.1270403116941452, 0.17969723045825958, -0.13685379922389984, -0.25928929448127747, 0.12393020838499069, 
-0.1636963188648224, -0.16647985577583313, 0.0040023741312325, -0.006962866988033056, 0.08049977570772171, -0.03446655720472336, -0.056274134665727615, 0.042339932173490524, 0.024350708350539207, 0.029094615951180458, 0.01740112341940403, 0.07037191838026047, -0.1023021712899208, 0.08444856107234955, 0.058610700070858, -0.014111426658928394, 0.15077349543571472, 0.011494536884129047, -0.05393160134553909, 0.014761670492589474, 0.044013332575559616, -0.015627963468432426, -0.05899091437458992, -0.09661509096622467, 0.019826244562864304, -0.031149597838521004, 0.08229395002126694, 0.1699674129486084, 0.023824702948331833, -0.02797185815870762, 0.028922779485583305, 0.028606392443180084, 0.1009954959154129, 0.06960704177618027, 0.03099375218153, -0.04839283227920532, 0.04952205345034599, -0.0417071171104908, -0.11430390179157257, -0.004862460307776928, -0.011735930107533932, 0.11975742131471634, -0.08906009048223495, -0.01223952230066061, 0.05951591953635216, -0.04513183981180191, 0.0019881438929587603, 0.0428374819457531, 0.0035966038703918457, 0.1388600617647171, 0.004440935328602791, -0.04352007433772087, 0.17440910637378693, -0.05288633331656456, 0.15533447265625, 0.1715822070837021, -0.07049662619829178, 0.015605369582772255, -0.1273636519908905, 0.003230511210858822, -0.014480113983154297, 0.05292887985706329, -0.05400136485695839, -0.05201306566596031, -0.01274962443858385, 0.014292534440755844, -0.03134604170918465, 0.01711403578519821, -0.06057267636060715, -0.08167021721601486, -0.10849859565496445, 0.018649224191904068, 0.20683221518993378, -0.22544461488723755, 0.1609548032283783, 0.40251004695892334, 0.15190774202346802, 0.21155193448066711, -0.12478897720575333, -0.002471078187227249, -0.06630261242389679, 0.026115071028470993, -0.024814706295728683, 0.13782677054405212, -0.13174867630004883, -0.01413064356893301, 0.03880728408694267, 0.0454997681081295, 0.0661163181066513, -0.17195898294448853, -0.15260353684425354, -0.0034879595041275024, 
-0.020591814070940018, -0.1749730259180069, 0.04874620959162712, -0.07595308125019073, 0.02181261032819748, 0.018216799944639206, -0.10832522064447403, 0.16837291419506073, -0.033566512167453766, -0.06695768237113953, 0.052613962441682816, -0.20581911504268646, -0.07900715619325638, -0.17772749066352844, -0.18375012278556824, 0.06050071492791176, 0.05760138854384422, 0.07903145253658295, -0.05951719731092453, -0.01922747679054737, 0.061719246208667755, -0.009363299235701561, -0.13802112638950348, -0.04235544428229332, -0.06993678212165833, 0.08744155615568161, -0.09474305808544159, -0.07518411427736282, -0.07833878695964813, -0.046996138989925385, -0.020961694419384003, 0.08125963062047958, -0.1039251759648323, 0.08903530240058899, 0.1493726521730423, 0.03651920333504677, 0.05440247058868408, -0.08271230012178421, 0.12693379819393158, -0.037743739783763885, -0.09459595382213593, 0.07307634502649307, 0.004350725095719099, 0.04920351505279541, 0.24039287865161896, 0.08962162584066391, -0.10578162968158722, -0.01780811697244644, -0.0968487411737442, -0.16405464708805084, -0.2553846538066864, -0.06823288649320602, -0.08744750916957855, 0.14417944848537445, 0.014636521227657795, 0.10712126642465591, 0.14313316345214844, 0.01343101728707552, 0.10255914181470871, -0.08983208239078522, -0.018939344212412834, 0.031209396198391914, 0.2135104089975357, -0.05208220332860947, 0.00838248711079359, -0.13684824109077454, -0.0256142970174551, 0.14601100981235504, 0.13798639178276062, 0.14503207802772522, 0.31421369314193726, 0.15292863547801971, 0.13410434126853943, 0.13474710285663605, 0.12333164364099503, 0.07403261214494705, 0.03444362059235573, -0.015304201282560825, -0.06035377085208893, -0.003846159903332591, 0.02816268615424633, 0.05421729013323784, 0.06724072247743607, -0.22906480729579926, 0.041139665991067886, -0.2661744952201843, 0.03544611483812332, -0.0854712724685669, 0.1161833181977272, -0.028890252113342285, 0.11051984131336212, 0.11386284977197647, 
0.05553818494081497, -0.023278791457414627, 0.16036942601203918, 0.032686375081539154, -0.07703183591365814, 0.020292721688747406, 0.024695809930562973, 0.06633034348487854, 0.08606193959712982, 0.09550496190786362, -0.020778406411409378, -0.1831783503293991, 0.025963841006159782, 0.12212833017110825, -0.20747940242290497, 0.289523184299469, 0.013651901856064796, -0.0743619054555893, -0.01690039224922657, -0.06958060711622238, 0.008433517068624496, 0.12829731404781342, 0.10406835377216339, 0.05508929491043091, -0.2613787055015564, -0.13299626111984253, 0.046764206141233444, -0.00873907096683979, 0.11356569826602936, -0.0052223424427211285, -0.14201195538043976, -0.06640999764204025, 0.05814211815595627, -0.006591420155018568, 0.13023322820663452, -0.018290361389517784, -0.08173255622386932, -0.010230090469121933, 0.055564697831869125, -0.001312803477048874, -0.04580084979534149, 0.07523149996995926, 0.009008137509226799, 0.02259289287030697, -0.08178020268678665, 0.03887253627181053, -0.08071476966142654, -0.25375792384147644, 0.019298138096928596, -0.04987313598394394, 0.004092312417924404, -0.04684043675661087, -0.15448936820030212, -0.1129264086484909, -0.15445278584957123, 0.13100723922252655, -0.03675999864935875, 0.091565802693367, -0.0817658007144928, 0.13736046850681305, -0.08521489799022675, 0.05375019088387489, 0.00614814180880785, 0.03918716683983803, -0.017955513671040535, -0.1031481996178627, 0.09334362298250198, -0.1874227225780487, 0.023863423615694046, 0.010427716188132763, -0.056847453117370605, -0.01354232057929039, 0.03918023407459259, -0.08763083070516586, 0.21879427134990692, 0.3331502079963684, -0.011948764324188232, 0.22546616196632385, 0.35863226652145386, -0.13763751089572906, -0.23258967697620392, -0.1205512136220932, -0.3263251483440399, -0.09005610644817352, 0.17321562767028809, -0.18057219684123993, 0.04850830137729645, 0.16150830686092377, -0.10868281871080399, 0.22499866783618927, -0.22723928093910217, -0.04793389141559601, 
0.1823979914188385, -0.038322996348142624, 0.4527989625930786, -0.1144307404756546, -0.1784561723470688, -0.03637253865599632, -0.16285361349582672, 0.12426037341356277, -0.026553882285952568, 0.06700495630502701, 0.02416347898542881, -0.011372359469532967, -0.009014161303639412, -0.04529716446995735, 0.2216065675020218, 0.0522729866206646, 0.10468899458646774, -0.09159468114376068, -0.17199653387069702, 0.1907423883676529, -0.0004908236442133784, -0.003372655250132084, -0.05411549657583237, -0.04850282520055771, -0.06871756166219711, 0.033092137426137924, -0.0334564633667469, 0.06195882335305214, 0.03364093229174614, -0.11903523653745651, -0.10248823463916779, 0.034111104905605316, -0.13155671954154968, -0.054850947111845016, 0.26421889662742615, -0.02080743946135044, 0.09609334170818329, 0.04959092289209366, -0.05474294349551201, -0.13538943231105804, 0.005736751481890678, -0.07534020394086838, -0.05711410939693451, 0.06573604047298431, -0.11453206837177277, -0.024341827258467674, 0.1293732225894928, -0.029497180134058, 0.09674722701311111, 0.08061115443706512, -0.07585363835096359, 0.02032829262316227, 0.15617427229881287, -0.07247176766395569, -0.10849180817604065, 0.04999847710132599, 0.04640531167387962, 0.17256882786750793, 0.004101871978491545, 0.02018604800105095, 0.08726977556943893, 0.045959215611219406, -0.007486662827432156, 0.007311292923986912, -0.11321697384119034, -0.04241771996021271, 0.0387241393327713, -0.005273692775517702, -0.10946331918239594, 0.16008898615837097, 0.056837860494852066, 0.004653505515307188, -0.06027700752019882, 0.09720424562692642, -0.06709636747837067, -0.07046061009168625, -0.1753035932779312, 0.018511172384023666, -0.12734080851078033, -0.09874535351991653, 0.06846235692501068, -0.09371624886989594, -0.04084605351090431, 0.08152704685926437, 0.046927981078624725, 0.14401860535144806, -0.006597559433430433, -0.023080874234437943, 0.149825319647789, -0.0884878933429718, -0.2241756170988083, 0.01969664730131626, 
-0.04083063453435898, -0.07065816223621368, -0.0007070365245454013, 0.06069544702768326, -0.0663156732916832, -0.11958606541156769, -0.20477768778800964, 0.10412076860666275, -0.12043121457099915, -0.03954985365271568, -0.1041841059923172, -0.053260523825883865, 0.07891252636909485, -0.02613759972155094, -0.04122013971209526, -0.047595683485269547, -0.16630595922470093, 0.054254453629255295, 0.07140932232141495, 0.11125344783067703, -0.0759999230504036, -0.018354382365942, 0.1398727148771286, 0.048581548035144806, 0.08479110151529312, 0.07578440010547638, 0.026255371049046516, 0.16728560626506805, -0.1708206981420517, -0.0542997270822525, 0.1068294569849968, -0.026716172695159912, 0.01994573324918747, 0.10631280392408371, -0.04839588701725006, 0.07042654603719711, -0.05095988139510155, 0.05859163776040077, -0.15704534947872162, -0.13073866069316864, -0.04184387996792793, 0.023728877305984497, -0.2260182797908783, 0.015071595087647438, -0.1769561767578125, 0.19692228734493256, -0.024228032678365707, 0.11490963399410248, 0.08052190393209457, 0.02052290178835392, 0.03539382666349411, -0.006019921973347664, 0.00946811307221651, -0.10524865239858627, -0.05784677714109421, -0.07560300827026367, -0.1168874129652977, -0.009665017947554588, 0.36614301800727844, 0.02430291846394539, -0.19682736694812775, 0.051222387701272964, 0.18285293877124786, 0.023639049381017685, -0.0073763905093073845, 0.26180747151374817, 0.08150359988212585, -0.023175053298473358, -0.1782374382019043, 0.0396091528236866, -0.08699734508991241, -0.15269799530506134, 0.11385007947683334, 0.09347525984048843, 0.05813581123948097, 0.022930078208446503, 0.10404518246650696, -0.035940010100603104, -0.05509711429476738, -0.13301853835582733, 0.13368983566761017, -0.001790675800293684, 0.0193882267922163, 0.0897885113954544, 0.19249756634235382, -0.045275162905454636, 0.05437124893069267, -0.07336640357971191, -0.001598604372702539, -0.15740543603897095, -0.13358698785305023, 0.06194563955068588, 
-0.08269550651311874, 0.06342913210391998, 0.050261519849300385, 0.04341990500688553, 0.31786394119262695, 0.039095040410757065, -0.046439893543720245, 0.003166865324601531, -0.14845187962055206, -0.08075450360774994, -0.06024569645524025, -0.03110554814338684, 0.028620192781090736, -0.13928957283496857, -0.09898591786623001, -0.06917677819728851, -0.130235955119133, -0.06539803743362427, 0.025270747020840645, 0.014251931570470333, -0.053083837032318115, -0.17625881731510162, -0.04808593541383743, -0.06644169986248016, 0.10105955600738525, -0.08462738990783691, 0.1516820639371872, 0.0022449472453445196, 0.030281953513622284, 0.07627002149820328, 0.09585131704807281, 0.018900424242019653, -0.06975197046995163, 0.05599058046936989, 0.12436293810606003, 0.01323844213038683, 0.1259988248348236, -0.06034265458583832, -0.019420607015490532, -0.014145253226161003, 0.14038437604904175, 0.304447740316391, -0.01856905221939087, -0.013814439997076988, -0.022110093384981155, 0.021388787776231766, 0.10893569141626358, 0.19800719618797302, -0.03437356278300285, 0.2551359534263611, -0.058974795043468475, 0.0756678432226181, -0.013180435635149479, -0.005362013820558786, -0.053146667778491974, 0.06074550002813339, 0.06268858164548874, -0.06877048313617706, -0.10191375762224197, 0.15178529918193817, -0.14985080063343048, 0.13306055963039398, 0.14678068459033966, -0.06057753041386604, 0.03797250986099243, 0.0007459368789568543, 0.19896264374256134, -0.03570213168859482, 0.0984780564904213, -0.10653308779001236, -0.10261140763759613, -0.14764924347400665, 0.037690844386816025, -0.36797797679901123, -0.1756322830915451, 0.11731542646884918, 0.14115898311138153, 0.1759258657693863, -0.012341637164354324, 0.056479312479496, 0.0033020609989762306, 0.08296097069978714, -0.04232487455010414, 0.1519634872674942, 0.0612073615193367, -0.017103128135204315, -0.15296664834022522, -0.20328094065189362, -0.0012039330322295427, -0.058561209589242935, 0.055583830922842026, -0.02269243635237217, 
0.025347469374537468, 0.07746459543704987, -0.06768939644098282, -0.029180381447076797, -0.02352982573211193, -0.13262848556041718, 0.052229251712560654, -0.04354005306959152, 0.0320255309343338, -0.03958037868142128, -0.022394726052880287, -0.039987675845623016, 0.10721533745527267, -0.22402705252170563, -0.08517231047153473, 0.1422796994447708, -0.03421911224722862, 0.1542559564113617, -0.02848726324737072, -0.12159585952758789, -0.024955326691269875, -0.06977712363004684, 0.10887379199266434, -0.1419300138950348, 0.038592495024204254, 0.13747453689575195, 0.008710617199540138, 0.031119761988520622, -0.2533661723136902, 0.050644006580114365, -0.03556957095861435, -0.016733208671212196, -0.057031940668821335 ]
eceb18a86a9a0fcdde3b642ef8063a03f7e02871
# Dataset Card for Evaluation run of zhengr/MixTAO-7Bx2-MoE-Instruct-v7.0 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [zhengr/MixTAO-7Bx2-MoE-Instruct-v7.0](https://huggingface.co/zhengr/MixTAO-7Bx2-MoE-Instruct-v7.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_zhengr__MixTAO-7Bx2-MoE-Instruct-v7.0", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T16:50:29.347308](https://huggingface.co/datasets/open-llm-leaderboard/details_zhengr__MixTAO-7Bx2-MoE-Instruct-v7.0/blob/main/results_2024-01-25T16-50-29.347308.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6525849646763985, "acc_stderr": 0.03210920704271567, "acc_norm": 0.6512626520654442, "acc_norm_stderr": 0.032796956919059296, "mc1": 0.616891064871481, "mc1_stderr": 0.017018461679389855, "mc2": 0.7425887846449252, "mc2_stderr": 0.014374185583246045 }, "harness|arc:challenge|25": { "acc": 0.7184300341296929, "acc_stderr": 0.013143376735009019, "acc_norm": 0.742320819112628, "acc_norm_stderr": 0.0127807705627684 }, "harness|hellaswag|10": { "acc": 0.7317267476598287, "acc_stderr": 0.004421551307678461, "acc_norm": 0.8937462656841266, "acc_norm_stderr": 0.0030753230104084216 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.66, "acc_stderr": 0.04760952285695238, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695238 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7094339622641509, "acc_stderr": 0.027943219989337124, "acc_norm": 0.7094339622641509, "acc_norm_stderr": 0.027943219989337124 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03476590104304134, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, 
"acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6589595375722543, "acc_stderr": 0.036146654241808254, "acc_norm": 0.6589595375722543, "acc_norm_stderr": 0.036146654241808254 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4215686274509804, "acc_stderr": 0.04913595201274498, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.04913595201274498 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5787234042553191, "acc_stderr": 0.03227834510146267, "acc_norm": 0.5787234042553191, "acc_norm_stderr": 0.03227834510146267 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5862068965517241, "acc_stderr": 0.04104269211806232, "acc_norm": 0.5862068965517241, "acc_norm_stderr": 0.04104269211806232 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42857142857142855, "acc_stderr": 0.02548718714785938, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.02548718714785938 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4365079365079365, "acc_stderr": 0.04435932892851466, "acc_norm": 0.4365079365079365, "acc_norm_stderr": 0.04435932892851466 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7870967741935484, "acc_stderr": 0.023287665127268542, "acc_norm": 0.7870967741935484, "acc_norm_stderr": 0.023287665127268542 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.49261083743842365, "acc_stderr": 0.035176035403610084, "acc_norm": 0.49261083743842365, "acc_norm_stderr": 0.035176035403610084 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.793939393939394, "acc_stderr": 0.0315841532404771, "acc_norm": 0.793939393939394, "acc_norm_stderr": 0.0315841532404771 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7828282828282829, "acc_stderr": 0.02937661648494563, "acc_norm": 0.7828282828282829, "acc_norm_stderr": 0.02937661648494563 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6692307692307692, "acc_stderr": 0.023854795680971125, "acc_norm": 0.6692307692307692, "acc_norm_stderr": 0.023854795680971125 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34074074074074073, "acc_stderr": 0.028897748741131147, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.028897748741131147 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6638655462184874, "acc_stderr": 0.030684737115135363, "acc_norm": 0.6638655462184874, "acc_norm_stderr": 0.030684737115135363 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8403669724770643, "acc_stderr": 0.015703498348461783, "acc_norm": 0.8403669724770643, "acc_norm_stderr": 0.015703498348461783 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5092592592592593, "acc_stderr": 
0.034093869469927006, "acc_norm": 0.5092592592592593, "acc_norm_stderr": 0.034093869469927006 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8431372549019608, "acc_stderr": 0.02552472232455335, "acc_norm": 0.8431372549019608, "acc_norm_stderr": 0.02552472232455335 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7890295358649789, "acc_stderr": 0.026558372502661916, "acc_norm": 0.7890295358649789, "acc_norm_stderr": 0.026558372502661916 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.031024411740572213, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.031024411740572213 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8015267175572519, "acc_stderr": 0.03498149385462472, "acc_norm": 0.8015267175572519, "acc_norm_stderr": 0.03498149385462472 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228732, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228732 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252626, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252626 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.0335195387952127, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.0335195387952127 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.04684099321077106, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.04684099321077106 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8888888888888888, "acc_stderr": 0.020588491316092368, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.020588491316092368 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 
0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8237547892720306, "acc_stderr": 0.013625556907993464, "acc_norm": 0.8237547892720306, "acc_norm_stderr": 0.013625556907993464 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7283236994219653, "acc_stderr": 0.023948512905468365, "acc_norm": 0.7283236994219653, "acc_norm_stderr": 0.023948512905468365 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.45027932960893857, "acc_stderr": 0.01663961523684581, "acc_norm": 0.45027932960893857, "acc_norm_stderr": 0.01663961523684581 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7189542483660131, "acc_stderr": 0.025738854797818733, "acc_norm": 0.7189542483660131, "acc_norm_stderr": 0.025738854797818733 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7106109324758842, "acc_stderr": 0.025755865922632945, "acc_norm": 0.7106109324758842, "acc_norm_stderr": 0.025755865922632945 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7438271604938271, "acc_stderr": 0.0242885336377261, "acc_norm": 0.7438271604938271, "acc_norm_stderr": 0.0242885336377261 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.49645390070921985, "acc_stderr": 0.02982674915328092, "acc_norm": 0.49645390070921985, "acc_norm_stderr": 0.02982674915328092 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.47131681877444587, "acc_stderr": 0.012749206007657476, "acc_norm": 0.47131681877444587, "acc_norm_stderr": 0.012749206007657476 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6617647058823529, "acc_stderr": 0.028739328513983572, "acc_norm": 0.6617647058823529, "acc_norm_stderr": 0.028739328513983572 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6764705882352942, "acc_stderr": 0.018926082916083376, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.018926082916083376 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, 
"acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7346938775510204, "acc_stderr": 0.0282638899437846, "acc_norm": 0.7346938775510204, "acc_norm_stderr": 0.0282638899437846 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8308457711442786, "acc_stderr": 0.02650859065623327, "acc_norm": 0.8308457711442786, "acc_norm_stderr": 0.02650859065623327 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.03869543323472101, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.616891064871481, "mc1_stderr": 0.017018461679389855, "mc2": 0.7425887846449252, "mc2_stderr": 0.014374185583246045 }, "harness|winogrande|5": { "acc": 0.877663772691397, "acc_stderr": 0.009209260861830796 }, "harness|gsm8k|5": { "acc": 0.6914329037149356, "acc_stderr": 0.0127230760498159 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_zhengr__MixTAO-7Bx2-MoE-Instruct-v7.0
[ "region:us" ]
2024-01-25T16:52:43+00:00
{"pretty_name": "Evaluation run of zhengr/MixTAO-7Bx2-MoE-Instruct-v7.0", "dataset_summary": "Dataset automatically created during the evaluation run of model [zhengr/MixTAO-7Bx2-MoE-Instruct-v7.0](https://huggingface.co/zhengr/MixTAO-7Bx2-MoE-Instruct-v7.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_zhengr__MixTAO-7Bx2-MoE-Instruct-v7.0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T16:50:29.347308](https://huggingface.co/datasets/open-llm-leaderboard/details_zhengr__MixTAO-7Bx2-MoE-Instruct-v7.0/blob/main/results_2024-01-25T16-50-29.347308.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6525849646763985,\n \"acc_stderr\": 0.03210920704271567,\n \"acc_norm\": 0.6512626520654442,\n \"acc_norm_stderr\": 0.032796956919059296,\n \"mc1\": 0.616891064871481,\n \"mc1_stderr\": 0.017018461679389855,\n \"mc2\": 0.7425887846449252,\n \"mc2_stderr\": 0.014374185583246045\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7184300341296929,\n \"acc_stderr\": 0.013143376735009019,\n \"acc_norm\": 0.742320819112628,\n \"acc_norm_stderr\": 0.0127807705627684\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7317267476598287,\n \"acc_stderr\": 0.004421551307678461,\n \"acc_norm\": 0.8937462656841266,\n \"acc_norm_stderr\": 0.0030753230104084216\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695238,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695238\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7094339622641509,\n \"acc_stderr\": 0.027943219989337124,\n \"acc_norm\": 0.7094339622641509,\n \"acc_norm_stderr\": 0.027943219989337124\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n 
\"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6589595375722543,\n \"acc_stderr\": 0.036146654241808254,\n \"acc_norm\": 0.6589595375722543,\n \"acc_norm_stderr\": 0.036146654241808254\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5787234042553191,\n \"acc_stderr\": 0.03227834510146267,\n \"acc_norm\": 0.5787234042553191,\n \"acc_norm_stderr\": 0.03227834510146267\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5862068965517241,\n \"acc_stderr\": 0.04104269211806232,\n \"acc_norm\": 0.5862068965517241,\n \"acc_norm_stderr\": 0.04104269211806232\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.02548718714785938,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.02548718714785938\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4365079365079365,\n \"acc_stderr\": 
0.04435932892851466,\n \"acc_norm\": 0.4365079365079365,\n \"acc_norm_stderr\": 0.04435932892851466\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7870967741935484,\n \"acc_stderr\": 0.023287665127268542,\n \"acc_norm\": 0.7870967741935484,\n \"acc_norm_stderr\": 0.023287665127268542\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.49261083743842365,\n \"acc_stderr\": 0.035176035403610084,\n \"acc_norm\": 0.49261083743842365,\n \"acc_norm_stderr\": 0.035176035403610084\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.793939393939394,\n \"acc_stderr\": 0.0315841532404771,\n \"acc_norm\": 0.793939393939394,\n \"acc_norm_stderr\": 0.0315841532404771\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.02937661648494563,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.02937661648494563\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6692307692307692,\n \"acc_stderr\": 0.023854795680971125,\n \"acc_norm\": 0.6692307692307692,\n \"acc_norm_stderr\": 0.023854795680971125\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34074074074074073,\n \"acc_stderr\": 0.028897748741131147,\n \"acc_norm\": 0.34074074074074073,\n \"acc_norm_stderr\": 0.028897748741131147\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6638655462184874,\n \"acc_stderr\": 0.030684737115135363,\n \"acc_norm\": 0.6638655462184874,\n \"acc_norm_stderr\": 0.030684737115135363\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8403669724770643,\n \"acc_stderr\": 0.015703498348461783,\n \"acc_norm\": 0.8403669724770643,\n \"acc_norm_stderr\": 0.015703498348461783\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5092592592592593,\n \"acc_stderr\": 0.034093869469927006,\n \"acc_norm\": 0.5092592592592593,\n \"acc_norm_stderr\": 0.034093869469927006\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8431372549019608,\n \"acc_stderr\": 0.02552472232455335,\n \"acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.02552472232455335\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7890295358649789,\n \"acc_stderr\": 0.026558372502661916,\n \"acc_norm\": 0.7890295358649789,\n \"acc_norm_stderr\": 0.026558372502661916\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.031024411740572213,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.031024411740572213\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8015267175572519,\n \"acc_stderr\": 0.03498149385462472,\n \"acc_norm\": 0.8015267175572519,\n \"acc_norm_stderr\": 0.03498149385462472\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228732,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228732\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 
0.04077494709252626,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252626\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.0335195387952127,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.0335195387952127\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.020588491316092368,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.020588491316092368\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8237547892720306,\n \"acc_stderr\": 0.013625556907993464,\n \"acc_norm\": 0.8237547892720306,\n \"acc_norm_stderr\": 0.013625556907993464\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7283236994219653,\n \"acc_stderr\": 0.023948512905468365,\n \"acc_norm\": 0.7283236994219653,\n \"acc_norm_stderr\": 0.023948512905468365\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.45027932960893857,\n \"acc_stderr\": 0.01663961523684581,\n \"acc_norm\": 0.45027932960893857,\n \"acc_norm_stderr\": 0.01663961523684581\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7189542483660131,\n \"acc_stderr\": 0.025738854797818733,\n \"acc_norm\": 0.7189542483660131,\n \"acc_norm_stderr\": 0.025738854797818733\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n \"acc_stderr\": 
0.025755865922632945,\n \"acc_norm\": 0.7106109324758842,\n \"acc_norm_stderr\": 0.025755865922632945\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7438271604938271,\n \"acc_stderr\": 0.0242885336377261,\n \"acc_norm\": 0.7438271604938271,\n \"acc_norm_stderr\": 0.0242885336377261\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.49645390070921985,\n \"acc_stderr\": 0.02982674915328092,\n \"acc_norm\": 0.49645390070921985,\n \"acc_norm_stderr\": 0.02982674915328092\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47131681877444587,\n \"acc_stderr\": 0.012749206007657476,\n \"acc_norm\": 0.47131681877444587,\n \"acc_norm_stderr\": 0.012749206007657476\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6617647058823529,\n \"acc_stderr\": 0.028739328513983572,\n \"acc_norm\": 0.6617647058823529,\n \"acc_norm_stderr\": 0.028739328513983572\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.018926082916083376,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.018926082916083376\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7346938775510204,\n \"acc_stderr\": 0.0282638899437846,\n \"acc_norm\": 0.7346938775510204,\n \"acc_norm_stderr\": 0.0282638899437846\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8308457711442786,\n \"acc_stderr\": 0.02650859065623327,\n \"acc_norm\": 0.8308457711442786,\n \"acc_norm_stderr\": 0.02650859065623327\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n 
\"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.616891064871481,\n \"mc1_stderr\": 0.017018461679389855,\n \"mc2\": 0.7425887846449252,\n \"mc2_stderr\": 0.014374185583246045\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.877663772691397,\n \"acc_stderr\": 0.009209260861830796\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6914329037149356,\n \"acc_stderr\": 0.0127230760498159\n }\n}\n```", "repo_url": "https://huggingface.co/zhengr/MixTAO-7Bx2-MoE-Instruct-v7.0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|arc:challenge|25_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|gsm8k|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hellaswag|10_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T16-50-29.347308.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T16-50-29.347308.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T16-50-29.347308.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T16-50-29.347308.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T16-50-29.347308.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T16-50-29.347308.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T16-50-29.347308.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": 
"2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": 
"2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T16-50-29.347308.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["**/details_harness|winogrande|5_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T16-50-29.347308.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T16_50_29.347308", "path": ["results_2024-01-25T16-50-29.347308.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T16-50-29.347308.parquet"]}]}]}
2024-01-25T16:53:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of zhengr/MixTAO-7Bx2-MoE-Instruct-v7.0 Dataset automatically created during the evaluation run of model zhengr/MixTAO-7Bx2-MoE-Instruct-v7.0 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T16:50:29.347308 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of zhengr/MixTAO-7Bx2-MoE-Instruct-v7.0\n\n\n\nDataset automatically created during the evaluation run of model zhengr/MixTAO-7Bx2-MoE-Instruct-v7.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T16:50:29.347308(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of zhengr/MixTAO-7Bx2-MoE-Instruct-v7.0\n\n\n\nDataset automatically created during the evaluation run of model zhengr/MixTAO-7Bx2-MoE-Instruct-v7.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T16:50:29.347308(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 201, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of zhengr/MixTAO-7Bx2-MoE-Instruct-v7.0\n\n\n\nDataset automatically created during the evaluation run of model zhengr/MixTAO-7Bx2-MoE-Instruct-v7.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T16:50:29.347308(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
[ -0.07432286441326141, 0.17054571211338043, -0.005116330925375223, 0.030451243743300438, 0.07209548354148865, -0.028540557250380516, 0.002647190587595105, 0.10893005132675171, -0.005784314125776291, 0.1615384817123413, -0.04092775657773018, 0.08172552287578583, 0.09217402338981628, 0.1726294904947281, 0.0035016548354178667, -0.13339965045452118, 0.008552111685276031, -0.07641159743070602, 0.05992654711008072, 0.07180216908454895, 0.09657373279333115, -0.09636643528938293, 0.060719288885593414, 0.0005323382210917771, -0.00823821872472763, -0.00770606379956007, -0.08399802446365356, -0.029823584482073784, 0.08267645537853241, 0.08598924428224564, 0.014141603372991085, -0.007936855778098106, 0.007811452727764845, -0.2501911222934723, 0.021392032504081726, 0.06686379015445709, 0.012955819256603718, 0.059975024312734604, 0.13977867364883423, -0.0610293373465538, 0.06143514811992645, -0.07523413002490997, 0.033733855932950974, 0.062405508011579514, -0.09712725877761841, -0.08743463456630707, -0.169143944978714, 0.025839248672127724, 0.07310280203819275, 0.04561547562479973, -0.02810804545879364, 0.11816485971212387, -0.01631810888648033, 0.0188625305891037, 0.15264955163002014, -0.1436621993780136, -0.031055502593517303, -0.011990319006145, 0.043136242777109146, 0.024128664284944534, -0.12808509171009064, -0.008861519396305084, 0.02444988675415516, 0.02955055795609951, 0.008718574419617653, 0.00802754145115614, 0.022858384996652603, 0.005619501695036888, -0.12801243364810944, -0.07064495980739594, 0.16666769981384277, 0.015709077939391136, -0.05034346505999565, -0.15812650322914124, -0.02677161432802677, -0.004697687458246946, -0.015526111237704754, -0.009732388891279697, 0.00611798046156764, -0.016371779143810272, 0.07432755827903748, -0.019868358969688416, -0.10348161309957504, -0.010944679379463196, -0.013832440599799156, 0.06243690475821495, 0.020756805315613747, -0.008665762841701508, 0.009650359861552715, 0.11581946909427643, 0.024890106171369553, 
-0.09356344491243362, -0.10188394784927368, -0.055441129952669144, -0.11967048794031143, -0.04714497551321983, 0.020570488646626472, -0.06722002476453781, 0.04475799947977066, 0.24325819313526154, -0.057662565261125565, 0.029813634231686592, -0.06662169843912125, -0.008783798664808273, 0.11768940836191177, 0.04739077761769295, -0.0353056825697422, -0.0808035358786583, -0.004482194781303406, 0.03596928343176842, 0.0127260722219944, -0.011696579866111279, 0.02548019029200077, 0.05663527175784111, 0.06183316931128502, 0.12001732736825943, 0.10811049491167068, 0.01683855801820755, -0.05957774072885513, -0.024796780198812485, 0.18025946617126465, -0.1648728996515274, 0.005860039498656988, 0.013827230781316757, -0.059150442481040955, -0.10226883739233017, 0.03965742886066437, -0.0032718656584620476, -0.06487100571393967, 0.08866869658231735, -0.06360811740159988, -0.04871825501322746, -0.08142522722482681, -0.039646830409765244, 0.04362883418798447, 0.0016410679090768099, -0.03862852230668068, -0.07077784836292267, -0.10276226699352264, -0.08021803200244904, 0.017167983576655388, -0.07545043528079987, -0.007638359908014536, 0.035707131028175354, -0.012774617411196232, -0.010771899484097958, -0.015280785970389843, 0.16047678887844086, -0.06924138963222504, 0.017109213396906853, -0.0005127662443555892, -0.01999777927994728, 0.09455645084381104, 0.03988394886255264, -0.10726609081029892, 0.0858791172504425, -0.013635202310979366, 0.1078249141573906, -0.08841574937105179, -0.015722448006272316, -0.12430679798126221, 0.0018095857230946422, -0.038716528564691544, -0.0011888775043189526, 0.015326766297221184, 0.09867224097251892, -0.24418112635612488, 0.023882264271378517, 0.09806320071220398, -0.1027730405330658, -0.097462959587574, 0.04176351800560951, -0.03906078264117241, 0.06080251187086105, 0.04450458288192749, 0.06969798356294632, 0.14505571126937866, -0.07696504890918732, -0.1445126235485077, -0.09350206702947617, -0.01503088977187872, 0.1264832764863968, 
0.06008141487836838, -0.05586135759949684, 0.15659940242767334, 0.038696497678756714, -0.006451335735619068, -0.05685507133603096, -0.031132444739341736, -0.05784374475479126, -0.005755119491368532, -0.056437306106090546, -0.07859986275434494, -0.006702650338411331, -0.05621786043047905, -0.030771583318710327, -0.08041117340326309, 0.028876950964331627, 0.08871948719024658, 0.01989203877747059, 0.012967114336788654, -0.0906236320734024, 0.06957164406776428, 0.01554372813552618, 0.024629218503832817, -0.23301853239536285, -0.0864042341709137, 0.036196865141391754, -0.11687719821929932, 0.03882376849651337, 0.011989054270088673, 0.009244874119758606, 0.04616045951843262, 0.0015350381145253778, 0.002965109422802925, 0.004969228524714708, -0.00017960091645363718, -0.008684014901518822, -0.1284557580947876, -0.06869877129793167, -0.07116123288869858, 0.07657565176486969, -0.13392870128154755, -0.027888361364603043, 0.09598562121391296, 0.17910265922546387, 0.009181780740618706, -0.0897793099284172, 0.06462620198726654, -0.016519786790013313, -0.05347876995801926, -0.0566277951002121, 0.0029767858795821667, -0.003131554927676916, 0.06176292523741722, 0.04284019395709038, -0.18295998871326447, -0.14147186279296875, 0.05673324316740036, 0.12919287383556366, -0.0576077476143837, -0.057606182992458344, -0.09737293422222137, -0.057941555976867676, -0.08110149949789047, -0.06563368439674377, 0.07841084152460098, 0.06910806149244308, 0.040018483996391296, -0.06726132333278656, -0.09313317388296127, 0.0002284253714606166, 0.05135621502995491, -0.059858839958906174, 0.08930786699056625, 0.0362841971218586, -0.1387251913547516, 0.09015461802482605, -0.006260792259126902, 0.13546396791934967, 0.11802127957344055, -0.009205699898302555, -0.09371870011091232, -0.00946747325360775, 0.06389870494604111, 0.04287167638540268, 0.09660529345273972, 0.002792723709717393, 0.026182809844613075, 0.08208084106445312, -0.003098670393228531, 0.05139279365539551, -0.06451637297868729, 
0.05685952305793762, 0.02239803783595562, 0.003428941359743476, 0.06040727347135544, 0.038839686661958694, 0.004852721001952887, 0.06652823090553284, 0.034987643361091614, 0.11221713572740555, -0.029495585709810257, -0.04994470998644829, -0.09700153023004532, 0.12563318014144897, -0.09361767023801804, -0.2403322011232376, -0.14661498367786407, -0.053488776087760925, -0.037333350628614426, -0.017970150336623192, 0.04551749676465988, 0.015674583613872528, -0.09091474115848541, -0.09994948655366898, 0.02536090649664402, 0.030627427622675896, -0.1042071282863617, -0.030459271743893623, 0.037004295736551285, 0.02055967040359974, -0.15771715342998505, 0.03390256687998772, 0.014829306863248348, -0.024467134848237038, -0.012922151014208794, 0.10014695674180984, 0.12789718806743622, 0.05539282038807869, 0.04271818697452545, -0.014257822185754776, 0.0021123834885656834, 0.17792320251464844, -0.08025157451629639, 0.04494914039969444, 0.13189086318016052, -0.03432953730225563, 0.06994243711233139, 0.12429147213697433, -0.002357937628403306, -0.07903656363487244, 0.023553159087896347, 0.09739790111780167, -0.0588967390358448, -0.24472574889659882, -0.07673080265522003, -0.05326182022690773, -0.03906910493969917, 0.08637582510709763, 0.08298468589782715, 0.00835830345749855, 0.00880985427647829, -0.12465135008096695, -0.03699176385998726, -0.03597276285290718, 0.060601674020290375, 0.03948722407221794, 0.023522404953837395, 0.042868271470069885, -0.03204861283302307, 0.048706430941820145, 0.11241572350263596, 0.008903172798454762, 0.18894131481647491, -0.04686331748962402, 0.18578031659126282, 0.09161337465047836, 0.09597335010766983, -0.0331590473651886, 0.07400237023830414, 0.002355050528421998, 0.06114201992750168, 0.014038002118468285, -0.09813050180673599, -0.03604884445667267, 0.07740649580955505, 0.015881521627306938, -0.018319465219974518, 0.041882991790771484, -0.020810594782233238, 0.06438448280096054, 0.1670796275138855, -0.00608847988769412, -0.11047529429197311, 
-0.07053560763597488, 0.038205333054065704, -0.04123972728848457, -0.10105377435684204, -0.040301550179719925, 0.060409869998693466, -0.1367792785167694, 0.027995485812425613, -0.04458659514784813, 0.09285328537225723, -0.09531433135271072, -0.03336523845791817, -0.01642349362373352, 0.10228526592254639, -0.006512626074254513, 0.10729187726974487, -0.12201068550348282, 0.06791083514690399, -0.006646180059760809, 0.06906462460756302, -0.09100065380334854, 0.06863392889499664, -0.0038508218713104725, -0.06468451023101807, 0.12076480686664581, 0.003176902187988162, -0.08458516001701355, -0.007478384301066399, -0.11327797174453735, -0.01568310707807541, 0.06648946553468704, -0.12026917934417725, 0.10623230040073395, 0.006149176973849535, -0.02279464155435562, -0.03759333863854408, -0.006479833275079727, -0.13893269002437592, -0.2176545411348343, 0.12280005216598511, -0.1316670924425125, 0.043922800570726395, -0.05936962366104126, -0.04268068075180054, -0.03611133247613907, 0.1880664974451065, -0.10608429461717606, -0.06989128142595291, -0.13394473493099213, 0.05400284752249718, 0.184209942817688, -0.05819343030452728, 0.060730304569005966, -0.0435061976313591, 0.2024514526128769, -0.01656499132514, -0.06383169442415237, -0.028322651982307434, -0.08835941553115845, -0.1618192493915558, -0.03861042112112045, 0.1547223925590515, 0.04296904057264328, 0.0029388542752712965, 0.011813673190772533, 0.03322936221957207, -0.001988479867577553, -0.09246063232421875, 0.0281243696808815, 0.04304814711213112, 0.12316203862428665, 0.008442860096693039, -0.04354371875524521, -0.12149284780025482, -0.10579703003168106, -0.10212767124176025, 0.05242568254470825, 0.16122691333293915, -0.04813586547970772, 0.12965518236160278, 0.08633691072463989, -0.08660943061113358, -0.16833753883838654, -0.0767398253083229, 0.035898610949516296, -0.0039979517459869385, 0.07936318218708038, -0.16055984795093536, 0.04287026450037956, 0.05638086050748825, -0.022194862365722656, 0.16234096884727478, 
-0.2137574404478073, -0.1513018012046814, 0.02223602496087551, 0.029266975820064545, -0.18261921405792236, -0.14407147467136383, -0.12030457705259323, -0.016014588996767998, -0.14105768501758575, 0.1451755315065384, 0.01102434005588293, 0.03743544593453407, -0.015737175941467285, 0.01834183931350708, 0.039411842823028564, -0.05819492042064667, 0.13326701521873474, -0.0061705876141786575, 0.014010652899742126, -0.0922086164355278, -0.0436609648168087, 0.02645438350737095, -0.059586185961961746, 0.05826316401362419, 0.008603867143392563, 0.05683241784572601, -0.0955427810549736, -0.025581588968634605, -0.046243295073509216, 0.06200665980577469, -0.05945307016372681, -0.04780079796910286, -0.07241456210613251, 0.09285462647676468, 0.08045203983783722, -0.006653551943600178, 0.04141853377223015, -0.029753338545560837, 0.0125520508736372, 0.22625435888767242, 0.07391234487295151, 0.025138506665825844, -0.0939955785870552, -0.04437575861811638, 0.011267120949923992, -0.00682597141712904, -0.0945385992527008, 0.04501203075051308, 0.09729938954114914, 0.054431430995464325, 0.0840829387307167, -0.01873382367193699, -0.20164500176906586, 0.005540632177144289, 0.09437109529972076, -0.10089456290006638, -0.1883126199245453, 0.0009496488492004573, 0.1345152109861374, -0.12625685334205627, -0.05895395204424858, 0.07963166385889053, 0.03114897385239601, -0.02512694150209427, 0.00940822996199131, 0.057963043451309204, 0.045600272715091705, 0.11925579607486725, -0.011693829670548439, 0.060118190944194794, -0.07595312595367432, 0.09333145618438721, 0.11957203596830368, -0.009311149828135967, -0.001056737033650279, 0.08253248035907745, -0.07548576593399048, -0.0566222257912159, 0.024360675364732742, 0.04499585181474686, 0.0050689177587628365, -0.03464653715491295, 0.005149843171238899, -0.04430467262864113, 0.061287302523851395, 0.13863052427768707, -0.0034406043123453856, 0.03797848895192146, 0.029470350593328476, -0.01611323282122612, -0.05594925209879875, 0.10578825324773788, 
0.06066219508647919, 0.039240140467882156, -0.038675691932439804, 0.015769707038998604, 0.013364261016249657, -0.000025475532311247662, 0.00891657080501318, -0.04217425733804703, -0.026521703228354454, -0.020908547565340996, -0.18246018886566162, 0.03156065195798874, -0.09167473018169403, -0.026870183646678925, -0.01265683677047491, -0.026886604726314545, -0.026739269495010376, 0.040912169963121414, -0.05305792763829231, -0.07456205785274506, -0.06338216364383698, 0.09429444372653961, -0.18351027369499207, 0.003558423835784197, 0.07724764198064804, -0.08752577751874924, 0.07637623697519302, 0.030995240435004234, 0.0024373261258006096, 0.021472128108143806, -0.08356314897537231, -0.03707977011799812, -0.03416246175765991, 0.05226273834705353, 0.03423008695244789, -0.15951721370220184, -0.00700713275000453, 0.01277578342705965, -0.07596469670534134, -0.034036215394735336, 0.018493056297302246, -0.14691784977912903, 0.014720827341079712, 0.028835108503699303, -0.013913732953369617, -0.03343866392970085, 0.0509851835668087, 0.0641494020819664, 0.03310203179717064, 0.10631567239761353, 0.005480013322085142, 0.03108830377459526, -0.1644858419895172, -0.03499032184481621, 0.010661364533007145, -0.015932777896523476, 0.044140927493572235, 0.01473241113126278, 0.03195657208561897, -0.008901216089725494, 0.17529016733169556, -0.037023209035396576, 0.05652065947651863, 0.035417672246694565, 0.012126609683036804, -0.06570544093847275, 0.03888503089547157, 0.07717971503734589, 0.004208615515381098, 0.02135060913860798, 0.025099966675043106, -0.03636357560753822, -0.036041244864463806, -0.06639473885297775, 0.08866369724273682, 0.13993439078330994, 0.14558717608451843, -0.019142646342515945, 0.06855121999979019, -0.17566148936748505, -0.05733482912182808, 0.039025720208883286, -0.06012733280658722, 0.01831670291721821, -0.06806007772684097, 0.04166668280959129, 0.10434512048959732, -0.1357881724834442, 0.11661446839570999, -0.052757252007722855, -0.035468749701976776, 
-0.004362406209111214, -0.15995411574840546, -0.03964356333017349, 0.03350430354475975, 0.01446029543876648, -0.08414945006370544, 0.09754237532615662, 0.0968085303902626, -0.003962750546634197, -0.005322988145053387, 0.09351464360952377, -0.09727680683135986, -0.09054365754127502, -0.0162504892796278, 0.004078277852386236, 0.014200572855770588, 0.019612781703472137, 0.05889938026666641, 0.020029280334711075, 0.04811449721455574, 0.07878042012453079, 0.08192632347345352, 0.06268583238124847, 0.032926205545663834, -0.01327493879944086, -0.05583766847848892, 0.009350515902042389, -0.02593861147761345, -0.03747355565428734, 0.17753997445106506, 0.046979811042547226, 0.032396961003541946, 0.003856606548652053, 0.19124986231327057, 0.029733700677752495, -0.0723700076341629, -0.1358436793088913, 0.04593801125884056, 0.02841181680560112, 0.006165316794067621, 0.049397654831409454, -0.14050805568695068, 0.04141470789909363, 0.16763806343078613, 0.07080487161874771, 0.021057022735476494, 0.01106925681233406, 0.041121941059827805, 0.02266535721719265, -0.036777932196855545, 0.020973166450858116, 0.030196858569979668, 0.1584203988313675, -0.04077764227986336, 0.06295377761125565, -0.019125696271657944, -0.030634326860308647, -0.020021600648760796, 0.12722334265708923, -0.04541188105940819, 0.026156505569815636, -0.060038890689611435, 0.07079154253005981, -0.05014477297663689, -0.3057973086833954, 0.018392087891697884, -0.07532452046871185, -0.13949328660964966, -0.016756227239966393, 0.024831974878907204, -0.031602416187524796, 0.030033204704523087, 0.04327656328678131, -0.017384227365255356, 0.1373329758644104, 0.020411906763911247, -0.047493547201156616, -0.05465046316385269, 0.09189103543758392, -0.04320589080452919, 0.23854251205921173, -0.015018667094409466, 0.03934410586953163, 0.0989249125123024, -0.0026320063043385744, -0.17451192438602448, 0.028739020228385925, 0.08246924728155136, -0.05815212056040764, 0.06414544582366943, 0.17719000577926636, -0.010448580607771873, 
0.09322400391101837, 0.03211604803800583, 0.0107549037784338, 0.04470948874950409, 0.07445969432592392, 0.0357222855091095, -0.0685071274638176, 0.08077333122491837, -0.08116912841796875, 0.14815348386764526, 0.09473922848701477, -0.032445669174194336, 0.009441331960260868, -0.07098877429962158, 0.0753188207745552, -0.026540333405137062, 0.12298539280891418, -0.01711288094520569, -0.1817438155412674, 0.0516485795378685, 0.05605962127447128, 0.056037500500679016, -0.18044549226760864, -0.06415343284606934, 0.09517128765583038, -0.061625491827726364, -0.006464092992246151, 0.11754299700260162, 0.05343398079276085, 0.020554201677441597, -0.056938156485557556, -0.08083879202604294, -0.0015853358199819922, 0.09140119701623917, -0.0831807553768158, -0.026395970955491066 ]
94782c6eecf913fdee8af658f6a8bdabe880f124
# Dataset Card for Evaluation run of Steelskull/VerA-Etheria-55b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Steelskull/VerA-Etheria-55b](https://huggingface.co/Steelskull/VerA-Etheria-55b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Steelskull__VerA-Etheria-55b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T17:11:24.913488](https://huggingface.co/datasets/open-llm-leaderboard/details_Steelskull__VerA-Etheria-55b/blob/main/results_2024-01-25T17-11-24.913488.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7263827073537332, "acc_stderr": 0.029170013986474255, "acc_norm": 0.7348687053269002, "acc_norm_stderr": 0.029706986665856413, "mc1": 0.379436964504284, "mc1_stderr": 0.016987039266142995, "mc2": 0.5210415817923857, "mc2_stderr": 0.01617919766526897 }, "harness|arc:challenge|25": { "acc": 0.6083617747440273, "acc_stderr": 0.014264122124938218, "acc_norm": 0.6424914675767918, "acc_norm_stderr": 0.014005494275916573 }, "harness|hellaswag|10": { "acc": 0.6434973112925712, "acc_stderr": 0.004779872250633708, "acc_norm": 0.8145787691694881, "acc_norm_stderr": 0.0038784463615532884 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.674074074074074, "acc_stderr": 0.040491220417025055, "acc_norm": 0.674074074074074, "acc_norm_stderr": 0.040491220417025055 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8289473684210527, "acc_stderr": 0.0306436070716771, "acc_norm": 0.8289473684210527, "acc_norm_stderr": 0.0306436070716771 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.77, "acc_stderr": 0.04229525846816506, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7849056603773585, "acc_stderr": 0.02528839450289137, "acc_norm": 0.7849056603773585, "acc_norm_stderr": 0.02528839450289137 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8680555555555556, "acc_stderr": 0.02830096838204443, "acc_norm": 0.8680555555555556, "acc_norm_stderr": 0.02830096838204443 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956911 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.58, "acc_stderr": 0.04960449637488584, "acc_norm": 0.58, "acc_norm_stderr": 
0.04960449637488584 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6994219653179191, "acc_stderr": 0.0349610148119118, "acc_norm": 0.6994219653179191, "acc_norm_stderr": 0.0349610148119118 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.45098039215686275, "acc_stderr": 0.049512182523962625, "acc_norm": 0.45098039215686275, "acc_norm_stderr": 0.049512182523962625 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.81, "acc_stderr": 0.039427724440366234, "acc_norm": 0.81, "acc_norm_stderr": 0.039427724440366234 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.7531914893617021, "acc_stderr": 0.02818544130123409, "acc_norm": 0.7531914893617021, "acc_norm_stderr": 0.02818544130123409 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.543859649122807, "acc_stderr": 0.046854730419077895, "acc_norm": 0.543859649122807, "acc_norm_stderr": 0.046854730419077895 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.7448275862068966, "acc_stderr": 0.03632984052707842, "acc_norm": 0.7448275862068966, "acc_norm_stderr": 0.03632984052707842 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.5529100529100529, "acc_stderr": 0.025606723995777025, "acc_norm": 0.5529100529100529, "acc_norm_stderr": 0.025606723995777025 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5158730158730159, "acc_stderr": 0.044698818540726076, "acc_norm": 0.5158730158730159, "acc_norm_stderr": 0.044698818540726076 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.9032258064516129, "acc_stderr": 0.016818943416345197, "acc_norm": 0.9032258064516129, "acc_norm_stderr": 0.016818943416345197 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.6157635467980296, "acc_stderr": 0.034223985656575494, "acc_norm": 0.6157635467980296, "acc_norm_stderr": 0.034223985656575494 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.8, "acc_stderr": 0.04020151261036846, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8363636363636363, "acc_stderr": 0.028887872395487946, "acc_norm": 0.8363636363636363, "acc_norm_stderr": 0.028887872395487946 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.9292929292929293, "acc_stderr": 0.018263105420199505, "acc_norm": 0.9292929292929293, "acc_norm_stderr": 0.018263105420199505 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9740932642487047, "acc_stderr": 0.01146452335695318, "acc_norm": 0.9740932642487047, "acc_norm_stderr": 0.01146452335695318 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7743589743589744, "acc_stderr": 0.021193632525148522, "acc_norm": 0.7743589743589744, "acc_norm_stderr": 0.021193632525148522 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37037037037037035, "acc_stderr": 0.02944316932303154, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.02944316932303154 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.8361344537815126, "acc_stderr": 0.02404405494044049, "acc_norm": 0.8361344537815126, "acc_norm_stderr": 0.02404405494044049 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.48344370860927155, "acc_stderr": 0.0408024418562897, "acc_norm": 0.48344370860927155, "acc_norm_stderr": 0.0408024418562897 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9119266055045872, "acc_stderr": 0.012150743719481685, "acc_norm": 0.9119266055045872, "acc_norm_stderr": 0.012150743719481685 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6435185185185185, "acc_stderr": 
0.032664783315272714, "acc_norm": 0.6435185185185185, "acc_norm_stderr": 0.032664783315272714 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9068627450980392, "acc_stderr": 0.020397853969426994, "acc_norm": 0.9068627450980392, "acc_norm_stderr": 0.020397853969426994 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8945147679324894, "acc_stderr": 0.01999556072375853, "acc_norm": 0.8945147679324894, "acc_norm_stderr": 0.01999556072375853 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.8071748878923767, "acc_stderr": 0.02647824096048937, "acc_norm": 0.8071748878923767, "acc_norm_stderr": 0.02647824096048937 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8473282442748091, "acc_stderr": 0.03154521672005471, "acc_norm": 0.8473282442748091, "acc_norm_stderr": 0.03154521672005471 }, "harness|hendrycksTest-international_law|5": { "acc": 0.859504132231405, "acc_stderr": 0.031722334260021585, "acc_norm": 0.859504132231405, "acc_norm_stderr": 0.031722334260021585 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8611111111111112, "acc_stderr": 0.03343270062869621, "acc_norm": 0.8611111111111112, "acc_norm_stderr": 0.03343270062869621 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8957055214723927, "acc_stderr": 0.02401351731943907, "acc_norm": 0.8957055214723927, "acc_norm_stderr": 0.02401351731943907 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5625, "acc_stderr": 0.04708567521880525, "acc_norm": 0.5625, "acc_norm_stderr": 0.04708567521880525 }, "harness|hendrycksTest-management|5": { "acc": 0.8349514563106796, "acc_stderr": 0.03675668832233188, "acc_norm": 0.8349514563106796, "acc_norm_stderr": 0.03675668832233188 }, "harness|hendrycksTest-marketing|5": { "acc": 0.905982905982906, "acc_stderr": 0.019119892798924978, "acc_norm": 0.905982905982906, "acc_norm_stderr": 0.019119892798924978 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.83, "acc_stderr": 0.03775251680686371, "acc_norm": 
0.83, "acc_norm_stderr": 0.03775251680686371 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8901660280970626, "acc_stderr": 0.011181510503247047, "acc_norm": 0.8901660280970626, "acc_norm_stderr": 0.011181510503247047 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.815028901734104, "acc_stderr": 0.02090397584208303, "acc_norm": 0.815028901734104, "acc_norm_stderr": 0.02090397584208303 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.5318435754189944, "acc_stderr": 0.01668855341561221, "acc_norm": 0.5318435754189944, "acc_norm_stderr": 0.01668855341561221 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.826797385620915, "acc_stderr": 0.02166840025651429, "acc_norm": 0.826797385620915, "acc_norm_stderr": 0.02166840025651429 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.8295819935691319, "acc_stderr": 0.02135534302826404, "acc_norm": 0.8295819935691319, "acc_norm_stderr": 0.02135534302826404 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8364197530864198, "acc_stderr": 0.020581466138257117, "acc_norm": 0.8364197530864198, "acc_norm_stderr": 0.020581466138257117 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.6099290780141844, "acc_stderr": 0.02909767559946393, "acc_norm": 0.6099290780141844, "acc_norm_stderr": 0.02909767559946393 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5730117340286832, "acc_stderr": 0.012633353557534416, "acc_norm": 0.5730117340286832, "acc_norm_stderr": 0.012633353557534416 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7941176470588235, "acc_stderr": 0.02456220431414231, "acc_norm": 0.7941176470588235, "acc_norm_stderr": 0.02456220431414231 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7843137254901961, "acc_stderr": 0.016639319350313264, "acc_norm": 0.7843137254901961, "acc_norm_stderr": 0.016639319350313264 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7454545454545455, "acc_stderr": 0.041723430387053825, "acc_norm": 
0.7454545454545455, "acc_norm_stderr": 0.041723430387053825 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8244897959183674, "acc_stderr": 0.024352800722970015, "acc_norm": 0.8244897959183674, "acc_norm_stderr": 0.024352800722970015 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8805970149253731, "acc_stderr": 0.02292879327721974, "acc_norm": 0.8805970149253731, "acc_norm_stderr": 0.02292879327721974 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.91, "acc_stderr": 0.028762349126466115, "acc_norm": 0.91, "acc_norm_stderr": 0.028762349126466115 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.038695433234721015, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.038695433234721015 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8830409356725146, "acc_stderr": 0.02464806896136615, "acc_norm": 0.8830409356725146, "acc_norm_stderr": 0.02464806896136615 }, "harness|truthfulqa:mc|0": { "mc1": 0.379436964504284, "mc1_stderr": 0.016987039266142995, "mc2": 0.5210415817923857, "mc2_stderr": 0.01617919766526897 }, "harness|winogrande|5": { "acc": 0.7592738752959748, "acc_stderr": 0.012015559212224169 }, "harness|gsm8k|5": { "acc": 0.3980288097043215, "acc_stderr": 0.013483026939074818 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_Steelskull__VerA-Etheria-55b
[ "region:us" ]
2024-01-25T17:13:38+00:00
{"pretty_name": "Evaluation run of Steelskull/VerA-Etheria-55b", "dataset_summary": "Dataset automatically created during the evaluation run of model [Steelskull/VerA-Etheria-55b](https://huggingface.co/Steelskull/VerA-Etheria-55b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Steelskull__VerA-Etheria-55b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T17:11:24.913488](https://huggingface.co/datasets/open-llm-leaderboard/details_Steelskull__VerA-Etheria-55b/blob/main/results_2024-01-25T17-11-24.913488.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7263827073537332,\n \"acc_stderr\": 0.029170013986474255,\n \"acc_norm\": 0.7348687053269002,\n \"acc_norm_stderr\": 0.029706986665856413,\n \"mc1\": 0.379436964504284,\n \"mc1_stderr\": 0.016987039266142995,\n \"mc2\": 0.5210415817923857,\n \"mc2_stderr\": 0.01617919766526897\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6083617747440273,\n \"acc_stderr\": 0.014264122124938218,\n \"acc_norm\": 0.6424914675767918,\n \"acc_norm_stderr\": 0.014005494275916573\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6434973112925712,\n \"acc_stderr\": 0.004779872250633708,\n \"acc_norm\": 0.8145787691694881,\n \"acc_norm_stderr\": 0.0038784463615532884\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.674074074074074,\n \"acc_stderr\": 0.040491220417025055,\n \"acc_norm\": 0.674074074074074,\n \"acc_norm_stderr\": 0.040491220417025055\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.8289473684210527,\n \"acc_stderr\": 0.0306436070716771,\n \"acc_norm\": 0.8289473684210527,\n \"acc_norm_stderr\": 0.0306436070716771\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7849056603773585,\n \"acc_stderr\": 0.02528839450289137,\n \"acc_norm\": 0.7849056603773585,\n \"acc_norm_stderr\": 0.02528839450289137\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8680555555555556,\n \"acc_stderr\": 0.02830096838204443,\n \"acc_norm\": 0.8680555555555556,\n \"acc_norm_stderr\": 0.02830096838204443\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.49,\n 
\"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.04960449637488584,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.04960449637488584\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6994219653179191,\n \"acc_stderr\": 0.0349610148119118,\n \"acc_norm\": 0.6994219653179191,\n \"acc_norm_stderr\": 0.0349610148119118\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.45098039215686275,\n \"acc_stderr\": 0.049512182523962625,\n \"acc_norm\": 0.45098039215686275,\n \"acc_norm_stderr\": 0.049512182523962625\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.039427724440366234,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.039427724440366234\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.7531914893617021,\n \"acc_stderr\": 0.02818544130123409,\n \"acc_norm\": 0.7531914893617021,\n \"acc_norm_stderr\": 0.02818544130123409\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.543859649122807,\n \"acc_stderr\": 0.046854730419077895,\n \"acc_norm\": 0.543859649122807,\n \"acc_norm_stderr\": 0.046854730419077895\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.7448275862068966,\n \"acc_stderr\": 0.03632984052707842,\n \"acc_norm\": 0.7448275862068966,\n \"acc_norm_stderr\": 0.03632984052707842\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.5529100529100529,\n \"acc_stderr\": 0.025606723995777025,\n \"acc_norm\": 0.5529100529100529,\n \"acc_norm_stderr\": 0.025606723995777025\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5158730158730159,\n \"acc_stderr\": 
0.044698818540726076,\n \"acc_norm\": 0.5158730158730159,\n \"acc_norm_stderr\": 0.044698818540726076\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.9032258064516129,\n \"acc_stderr\": 0.016818943416345197,\n \"acc_norm\": 0.9032258064516129,\n \"acc_norm_stderr\": 0.016818943416345197\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.6157635467980296,\n \"acc_stderr\": 0.034223985656575494,\n \"acc_norm\": 0.6157635467980296,\n \"acc_norm_stderr\": 0.034223985656575494\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8363636363636363,\n \"acc_stderr\": 0.028887872395487946,\n \"acc_norm\": 0.8363636363636363,\n \"acc_norm_stderr\": 0.028887872395487946\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.9292929292929293,\n \"acc_stderr\": 0.018263105420199505,\n \"acc_norm\": 0.9292929292929293,\n \"acc_norm_stderr\": 0.018263105420199505\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9740932642487047,\n \"acc_stderr\": 0.01146452335695318,\n \"acc_norm\": 0.9740932642487047,\n \"acc_norm_stderr\": 0.01146452335695318\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.7743589743589744,\n \"acc_stderr\": 0.021193632525148522,\n \"acc_norm\": 0.7743589743589744,\n \"acc_norm_stderr\": 0.021193632525148522\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37037037037037035,\n \"acc_stderr\": 0.02944316932303154,\n \"acc_norm\": 0.37037037037037035,\n \"acc_norm_stderr\": 0.02944316932303154\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.8361344537815126,\n \"acc_stderr\": 0.02404405494044049,\n \"acc_norm\": 0.8361344537815126,\n \"acc_norm_stderr\": 0.02404405494044049\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.48344370860927155,\n \"acc_stderr\": 0.0408024418562897,\n \"acc_norm\": 0.48344370860927155,\n \"acc_norm_stderr\": 0.0408024418562897\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9119266055045872,\n \"acc_stderr\": 0.012150743719481685,\n \"acc_norm\": 0.9119266055045872,\n \"acc_norm_stderr\": 0.012150743719481685\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6435185185185185,\n \"acc_stderr\": 0.032664783315272714,\n \"acc_norm\": 0.6435185185185185,\n \"acc_norm_stderr\": 0.032664783315272714\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9068627450980392,\n \"acc_stderr\": 0.020397853969426994,\n \"acc_norm\": 0.9068627450980392,\n \"acc_norm_stderr\": 0.020397853969426994\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8945147679324894,\n \"acc_stderr\": 0.01999556072375853,\n \"acc_norm\": 0.8945147679324894,\n \"acc_norm_stderr\": 0.01999556072375853\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.8071748878923767,\n \"acc_stderr\": 0.02647824096048937,\n \"acc_norm\": 0.8071748878923767,\n \"acc_norm_stderr\": 0.02647824096048937\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8473282442748091,\n \"acc_stderr\": 0.03154521672005471,\n \"acc_norm\": 0.8473282442748091,\n \"acc_norm_stderr\": 0.03154521672005471\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.859504132231405,\n \"acc_stderr\": 0.031722334260021585,\n \"acc_norm\": 0.859504132231405,\n \"acc_norm_stderr\": 0.031722334260021585\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8611111111111112,\n \"acc_stderr\": 0.03343270062869621,\n 
\"acc_norm\": 0.8611111111111112,\n \"acc_norm_stderr\": 0.03343270062869621\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8957055214723927,\n \"acc_stderr\": 0.02401351731943907,\n \"acc_norm\": 0.8957055214723927,\n \"acc_norm_stderr\": 0.02401351731943907\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5625,\n \"acc_stderr\": 0.04708567521880525,\n \"acc_norm\": 0.5625,\n \"acc_norm_stderr\": 0.04708567521880525\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8349514563106796,\n \"acc_stderr\": 0.03675668832233188,\n \"acc_norm\": 0.8349514563106796,\n \"acc_norm_stderr\": 0.03675668832233188\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.905982905982906,\n \"acc_stderr\": 0.019119892798924978,\n \"acc_norm\": 0.905982905982906,\n \"acc_norm_stderr\": 0.019119892798924978\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.03775251680686371,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.03775251680686371\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8901660280970626,\n \"acc_stderr\": 0.011181510503247047,\n \"acc_norm\": 0.8901660280970626,\n \"acc_norm_stderr\": 0.011181510503247047\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.815028901734104,\n \"acc_stderr\": 0.02090397584208303,\n \"acc_norm\": 0.815028901734104,\n \"acc_norm_stderr\": 0.02090397584208303\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.5318435754189944,\n \"acc_stderr\": 0.01668855341561221,\n \"acc_norm\": 0.5318435754189944,\n \"acc_norm_stderr\": 0.01668855341561221\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.826797385620915,\n \"acc_stderr\": 0.02166840025651429,\n \"acc_norm\": 0.826797385620915,\n \"acc_norm_stderr\": 0.02166840025651429\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.8295819935691319,\n \"acc_stderr\": 0.02135534302826404,\n \"acc_norm\": 0.8295819935691319,\n 
\"acc_norm_stderr\": 0.02135534302826404\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8364197530864198,\n \"acc_stderr\": 0.020581466138257117,\n \"acc_norm\": 0.8364197530864198,\n \"acc_norm_stderr\": 0.020581466138257117\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.6099290780141844,\n \"acc_stderr\": 0.02909767559946393,\n \"acc_norm\": 0.6099290780141844,\n \"acc_norm_stderr\": 0.02909767559946393\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5730117340286832,\n \"acc_stderr\": 0.012633353557534416,\n \"acc_norm\": 0.5730117340286832,\n \"acc_norm_stderr\": 0.012633353557534416\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7941176470588235,\n \"acc_stderr\": 0.02456220431414231,\n \"acc_norm\": 0.7941176470588235,\n \"acc_norm_stderr\": 0.02456220431414231\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7843137254901961,\n \"acc_stderr\": 0.016639319350313264,\n \"acc_norm\": 0.7843137254901961,\n \"acc_norm_stderr\": 0.016639319350313264\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7454545454545455,\n \"acc_stderr\": 0.041723430387053825,\n \"acc_norm\": 0.7454545454545455,\n \"acc_norm_stderr\": 0.041723430387053825\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8244897959183674,\n \"acc_stderr\": 0.024352800722970015,\n \"acc_norm\": 0.8244897959183674,\n \"acc_norm_stderr\": 0.024352800722970015\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8805970149253731,\n \"acc_stderr\": 0.02292879327721974,\n \"acc_norm\": 0.8805970149253731,\n \"acc_norm_stderr\": 0.02292879327721974\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.91,\n \"acc_stderr\": 0.028762349126466115,\n \"acc_norm\": 0.91,\n \"acc_norm_stderr\": 0.028762349126466115\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.038695433234721015,\n 
\"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.038695433234721015\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8830409356725146,\n \"acc_stderr\": 0.02464806896136615,\n \"acc_norm\": 0.8830409356725146,\n \"acc_norm_stderr\": 0.02464806896136615\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.379436964504284,\n \"mc1_stderr\": 0.016987039266142995,\n \"mc2\": 0.5210415817923857,\n \"mc2_stderr\": 0.01617919766526897\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7592738752959748,\n \"acc_stderr\": 0.012015559212224169\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3980288097043215,\n \"acc_stderr\": 0.013483026939074818\n }\n}\n```", "repo_url": "https://huggingface.co/Steelskull/VerA-Etheria-55b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|arc:challenge|25_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|gsm8k|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hellaswag|10_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T17-11-24.913488.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T17-11-24.913488.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T17-11-24.913488.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T17-11-24.913488.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T17-11-24.913488.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T17-11-24.913488.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T17-11-24.913488.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T17-11-24.913488.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["**/details_harness|winogrande|5_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T17-11-24.913488.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T17_11_24.913488", "path": ["results_2024-01-25T17-11-24.913488.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T17-11-24.913488.parquet"]}]}]}
2024-01-25T17:14:01+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Steelskull/VerA-Etheria-55b Dataset automatically created during the evaluation run of model Steelskull/VerA-Etheria-55b on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T17:11:24.913488(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Steelskull/VerA-Etheria-55b\n\n\n\nDataset automatically created during the evaluation run of model Steelskull/VerA-Etheria-55b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T17:11:24.913488(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Steelskull/VerA-Etheria-55b\n\n\n\nDataset automatically created during the evaluation run of model Steelskull/VerA-Etheria-55b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T17:11:24.913488(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 185, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Steelskull/VerA-Etheria-55b\n\n\n\nDataset automatically created during the evaluation run of model Steelskull/VerA-Etheria-55b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T17:11:24.913488(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.05984930321574211, 0.20439833402633667, -0.005040573421865702, 0.04129769653081894, 0.08424404263496399, -0.013120888732373714, 0.04796227067708969, 0.0968303233385086, 0.0246271975338459, 0.19404900074005127, -0.02933506667613983, 0.11453323066234589, 0.06693542748689651, 0.12938441336154938, 0.027519233524799347, -0.12603774666786194, 0.027536556124687195, -0.08571905642747879, 0.10764709115028381, 0.08167629688978195, 0.057429153472185135, -0.0776013508439064, 0.06539002805948257, -0.02584044076502323, 0.028050225228071213, -0.006701199803501368, -0.07788970321416855, -0.019905852153897285, 0.10410688817501068, 0.103745236992836, 0.041754815727472305, -0.01753431186079979, 0.021886246278882027, -0.25990259647369385, 0.016319479793310165, 0.09274392575025558, -0.006464068312197924, 0.034106768667697906, 0.12131823599338531, -0.07856615632772446, 0.0739036574959755, -0.02607855200767517, 0.08823437243700027, 0.05152200162410736, -0.1050143614411354, -0.1551690697669983, -0.14906556904315948, 0.018237102776765823, 0.05611878260970116, 0.040151163935661316, -0.03058016486465931, 0.16445448994636536, -0.06854236871004105, 0.04463290795683861, 0.12339939922094345, -0.1013294905424118, -0.016903521493077278, 0.03988892585039139, 0.008818678557872772, 0.08238154649734497, -0.08248184621334076, -0.022402236238121986, 0.030214477330446243, 0.05969230458140373, -0.0020205918699502945, 0.016773171722888947, -0.03286884352564812, 0.013977725058794022, -0.13461916148662567, -0.11830510944128036, 0.14737194776535034, -0.0003737323568202555, -0.05359569564461708, -0.18238331377506256, -0.007141892332583666, 0.02563934586942196, 0.003366143675521016, 0.001714478712528944, -0.0028571980074048042, -0.02256789803504944, 0.09479430317878723, -0.004064182750880718, -0.10361000895500183, -0.036005839705467224, -0.006843931507319212, 0.0824136883020401, 0.023064272478222847, -0.013804673217236996, 0.016228102147579193, 0.11844062060117722, 0.012528633698821068, 
-0.06065741926431656, -0.06999748945236206, -0.057178497314453125, -0.13166572153568268, -0.04522097855806351, 0.012225357815623283, -0.06855008751153946, 0.031134260818362236, 0.23789004981517792, -0.005334397312253714, 0.03000449761748314, -0.11468851566314697, 0.013056814670562744, 0.11230060458183289, 0.057517923414707184, -0.0841866135597229, -0.03716081753373146, -0.04290778562426567, 0.02999991923570633, 0.03272836655378342, -0.016687534749507904, 0.006024995353072882, 0.053346071392297745, 0.025919131934642792, 0.12178244441747665, 0.11547154933214188, 0.03530438616871834, -0.08316339552402496, -0.016843941062688828, 0.23299084603786469, -0.14028489589691162, -0.015333308838307858, 0.020104747265577316, -0.027654804289340973, -0.10695517808198929, 0.06545723974704742, -0.003981798887252808, -0.054878320544958115, 0.13285914063453674, -0.052640918642282486, -0.0855989158153534, -0.07047931104898453, -0.06209789589047432, 0.05263018608093262, 0.016913345083594322, -0.048829127103090286, -0.06746511161327362, -0.10373140871524811, -0.0825885608792305, 0.02848741039633751, -0.06178898736834526, -0.033422403037548065, 0.02777145244181156, 0.005758271552622318, -0.010649602860212326, -0.015604358166456223, 0.11996757239103317, -0.05452262610197067, 0.03794492036104202, -0.01515083760023117, 0.0119764544069767, 0.09674154222011566, 0.04214667156338692, -0.11378254741430283, 0.07282771915197372, -0.13106957077980042, 0.097462497651577, -0.12056522816419601, -0.007842001505196095, -0.12039098143577576, -0.004707886837422848, -0.04552633315324783, 0.04604366421699524, -0.0286497063934803, 0.08618044853210449, -0.21889063715934753, -0.007749804295599461, 0.14924322068691254, -0.1307162046432495, -0.07602158933877945, 0.0951564833521843, -0.04358021542429924, 0.06577470153570175, 0.05079273879528046, 0.110956571996212, 0.10051348805427551, -0.08619812875986099, -0.09126820415258408, -0.05933750793337822, -0.028906576335430145, 0.15938453376293182, 0.06625514477491379, 
-0.08835989236831665, 0.10262506455183029, 0.05585663020610809, -0.02581464685499668, -0.08102042973041534, 0.006041755899786949, -0.06877631694078445, -0.024150731042027473, -0.06852244585752487, -0.04583530128002167, -0.00043425793410278857, -0.0739055871963501, -0.014240612275898457, -0.08828852325677872, 0.0008774579619057477, 0.10084521770477295, -0.025875108316540718, 0.007199688348919153, -0.06668610125780106, 0.03825768828392029, 0.013811473734676838, 0.01153003703802824, -0.21364741027355194, -0.1005052775144577, 0.03281288966536522, -0.20224925875663757, 0.056775350123643875, 0.040499720722436905, 0.009103203192353249, 0.04351302608847618, -0.0077391136437654495, 0.028959909453988075, 0.015452963300049305, -0.012444197200238705, -0.010368423536419868, -0.15663978457450867, -0.0577322356402874, -0.08723121881484985, 0.09742679446935654, -0.13846704363822937, -0.011157426051795483, 0.07085912674665451, 0.1487514078617096, 0.022230098024010658, -0.07805746793746948, 0.05453142523765564, 0.013616393320262432, -0.03806755319237709, -0.04993328079581261, -0.0027488020714372396, -0.02480994537472725, 0.03557534143328667, 0.03883694112300873, -0.20171771943569183, -0.08471079915761948, 0.06391407549381256, 0.13517236709594727, -0.0783567875623703, -0.10219898819923401, -0.06052043288946152, -0.062043268233537674, -0.09033884108066559, -0.07870438694953918, 0.07603894919157028, 0.08561329543590546, 0.03573257103562355, -0.07400564849376678, -0.0509716235101223, 0.004307636991143227, 0.0576586052775383, -0.0648188665509224, 0.11325617879629135, 0.06935511529445648, -0.07200159132480621, 0.11046715825796127, -0.06546908617019653, 0.10444235801696777, 0.08439184725284576, 0.03302745521068573, -0.10194169729948044, 0.0038588333409279585, 0.06514234840869904, 0.046001821756362915, 0.08506286144256592, -0.06265975534915924, 0.03440428152680397, 0.08245831727981567, -0.015422843396663666, 0.04013407975435257, -0.06296756118535995, 0.022420261055231094, 
0.029411697760224342, 0.008361835964024067, 0.005344964563846588, 0.013645335100591183, 0.019528526812791824, 0.08224442601203918, 0.025197168812155724, 0.12214142084121704, -0.027267426252365112, -0.051880933344364166, -0.10813923180103302, 0.15120360255241394, -0.07832127064466476, -0.26207441091537476, -0.1752316951751709, -0.025181813165545464, -0.0177569892257452, -0.016700278967618942, 0.05949234217405319, -0.011960218660533428, -0.11521153897047043, -0.10790076106786728, 0.06718259304761887, 0.040458809584379196, -0.13239990174770355, -0.04900885000824928, 0.05537829548120499, -0.008927851915359497, -0.16683566570281982, 0.04347456246614456, 0.04478539153933525, -0.04744338244199753, -0.007978944107890129, 0.07197930663824081, 0.10982435196638107, 0.09500215202569962, 0.08833417296409607, -0.026502784341573715, -0.008006367832422256, 0.16494311392307281, -0.11294965445995331, 0.02934175357222557, 0.10034668445587158, -0.06859488785266876, 0.06898577511310577, 0.16759592294692993, 0.015464888885617256, -0.08028184622526169, 0.056500986218452454, 0.101118303835392, -0.07036744058132172, -0.24261240661144257, -0.1262449324131012, -0.03537258505821228, 0.014552365057170391, 0.10720182210206985, 0.0662543848156929, 0.014651631936430931, 0.014362385496497154, -0.12864473462104797, -0.026371313259005547, -0.04557238519191742, 0.06877098977565765, 0.06045468524098396, 0.0010176897048950195, 0.04688087850809097, -0.044452376663684845, 0.01784178987145424, 0.11698317527770996, 0.052264079451560974, 0.15070556104183197, -0.04691389948129654, 0.18692995607852936, 0.09159965068101883, 0.07566792517900467, -0.03666101396083832, 0.04333249852061272, -0.023294296115636826, 0.05950217694044113, -0.012570735067129135, -0.11021393537521362, -0.03570549935102463, 0.10560162365436554, 0.03842905908823013, -0.07516615837812424, 0.03938598185777664, -0.09470824152231216, 0.03744775429368019, 0.1978733092546463, -0.03261144459247589, -0.12869206070899963, -0.06381987780332565, 
0.06412889808416367, -0.031688474118709564, -0.08721370249986649, -0.02514343336224556, 0.08134794235229492, -0.14312031865119934, 0.013379089534282684, -0.039062097668647766, 0.07418623566627502, -0.12417985498905182, -0.026486340910196304, -0.025947315618395805, 0.03867090120911598, -0.006559045519679785, 0.11446329951286316, -0.11708274483680725, 0.09142264723777771, -0.00046212508459575474, 0.02779475972056389, -0.10691573470830917, 0.05465212091803551, -0.0313289538025856, -0.058734532445669174, 0.135182186961174, -0.012856204994022846, -0.07162729650735855, -0.05276668444275856, -0.09962616115808487, -0.003999918699264526, 0.04933210834860802, -0.1013287827372551, 0.10577242076396942, 0.02404661662876606, -0.01679227128624916, -0.02477227710187435, -0.01480928249657154, -0.10871674865484238, -0.24787648022174835, 0.11915341764688492, -0.14314831793308258, 0.02784179523587227, -0.06854217499494553, -0.0578758530318737, -0.04453889653086662, 0.1487908512353897, -0.09828242659568787, -0.05942058563232422, -0.11073716729879379, -0.017898648977279663, 0.18795925378799438, -0.05194912478327751, 0.061035867780447006, -0.046365831047296524, 0.1932404488325119, -0.024621855467557907, -0.04163975268602371, -0.008300499059259892, -0.076674684882164, -0.19297099113464355, -0.04387437924742699, 0.10647431015968323, 0.07882536202669144, 0.008865978568792343, -0.0077202897518873215, 0.01890813373029232, 0.008255081251263618, -0.10175345838069916, 0.02973359078168869, 0.10344721376895905, 0.1232931837439537, 0.050256457179784775, -0.0407753586769104, -0.13004593551158905, -0.09753793478012085, -0.10546852648258209, 0.03623694181442261, 0.17291764914989471, -0.06641727685928345, 0.16762447357177734, 0.1667301505804062, -0.09381714463233948, -0.20021632313728333, -0.05040467903017998, 0.01713264174759388, -0.015602819621562958, 0.1434565782546997, -0.19914746284484863, 0.06734739989042282, 0.07047712802886963, -0.029291415587067604, 0.11396417766809464, -0.26179811358451843, 
-0.14048968255519867, 0.03366910293698311, 0.040688406676054, -0.23487791419029236, -0.1710216999053955, -0.10583503544330597, -0.022601822391152382, -0.15410153567790985, 0.13116036355495453, -0.009088736958801746, 0.02687349170446396, -0.02222794108092785, 0.09616547077894211, 0.054250895977020264, -0.07684523612260818, 0.1278364062309265, -0.015960033982992172, 0.02128991298377514, -0.09845304489135742, -0.04562355950474739, 0.005302881356328726, -0.04239952936768532, 0.08617435395717621, 0.010403186082839966, 0.05344318971037865, -0.07238130271434784, -0.04178430885076523, -0.0741269439458847, 0.05852827802300453, -0.08172120898962021, -0.05446891859173775, -0.07236921787261963, 0.08427941054105759, 0.0725083127617836, -0.01628095656633377, 0.009246330708265305, -0.04884841665625572, 0.061506886035203934, 0.18755535781383514, 0.10928551107645035, 0.05365581810474396, -0.09884092956781387, -0.028969386592507362, -0.015829119831323624, -0.005680471193045378, -0.10041084885597229, 0.05071360245347023, 0.08317411690950394, 0.05068013072013855, 0.0883120447397232, -0.02361850067973137, -0.18552614748477936, 0.0007463852525688708, 0.07657557725906372, -0.08893661946058273, -0.19105057418346405, 0.04463500529527664, 0.14346203207969666, -0.15745681524276733, -0.0689118504524231, 0.07046036422252655, 0.02492647059261799, -0.0393725261092186, -0.0004056825418956578, 0.0757651999592781, 0.053310852497816086, 0.10379841923713684, 0.017833830788731575, 0.050025925040245056, -0.06990198791027069, 0.08777396380901337, 0.1257559359073639, -0.1155049204826355, 0.007989896461367607, 0.03089548833668232, -0.0579008050262928, -0.07326428592205048, -0.0008730982081033289, 0.016223279759287834, 0.02429165504872799, -0.029958831146359444, 0.015442064963281155, -0.03494010493159294, 0.055292099714279175, 0.1321294605731964, 0.0003581260971259326, 0.055082619190216064, 0.01459602639079094, 0.0046918680891394615, -0.0564001239836216, 0.10496003180742264, 0.030292311683297157, 
0.047648705542087555, -0.04195601865649223, 0.03448272868990898, 0.021578090265393257, -0.03302980586886406, 0.01879962533712387, -0.047157902270555496, -0.07656022906303406, 0.00022497129975818098, -0.15810762345790863, 0.05411514639854431, -0.08178476989269257, 0.009435499086976051, -0.007732550613582134, -0.02719125524163246, -0.008072254247963428, 0.007026028353720903, -0.07895911484956741, -0.039339881390333176, -0.04680244252085686, 0.1302827149629593, -0.19735674560070038, 0.0061841001734137535, 0.09631304442882538, -0.07127007097005844, 0.06914519518613815, -0.011464495211839676, -0.02238854393362999, 0.0328073687851429, -0.11172770708799362, 0.015936927869915962, -0.026951147243380547, 0.058192141354084015, 0.008832727558910847, -0.12885554134845734, -0.017508303746581078, -0.0014093708014115691, -0.06463354080915451, -0.006895762402564287, 0.039897553622722626, -0.14873650670051575, 0.06779379397630692, 0.08941089361906052, -0.06278424710035324, -0.04378572106361389, 0.05268115550279617, 0.049277413636446, 0.0017520623514428735, 0.10367348790168762, -0.004608294926583767, 0.021359533071517944, -0.1525307446718216, -0.043869610875844955, 0.007525231689214706, 0.014302135445177555, 0.046853333711624146, 0.016720231622457504, 0.021520042791962624, 0.00879533588886261, 0.2424962818622589, -0.018607622012495995, 0.027396341785788536, 0.024944189935922623, -0.007577163632959127, -0.028131473809480667, 0.03173530101776123, 0.011087232269346714, -0.017787320539355278, 0.0319165401160717, 0.008252579718828201, -0.045878078788518906, -0.0764659196138382, -0.027767473831772804, 0.07453420758247375, 0.14264816045761108, 0.1607178896665573, -0.04503023624420166, 0.08080211281776428, -0.16605736315250397, -0.0725981667637825, 0.00242191762663424, -0.05720489099621773, 0.037127986550331116, -0.07868074625730515, 0.07803942263126373, 0.0885133445262909, -0.09467758238315582, 0.15294404327869415, -0.05413348972797394, -0.0332854688167572, -0.035958901047706604, 
-0.16575461626052856, -0.03924132138490677, 0.037674613296985626, -0.001365781296044588, -0.08363107591867447, 0.11513881385326385, 0.13476337492465973, -0.013691190630197525, -0.0010108078131452203, 0.09192446619272232, -0.042939841747283936, -0.05775938183069229, -0.0331534780561924, 0.006273272912949324, 0.004368436988443136, 0.009064294397830963, 0.07854851335287094, 0.015396746806800365, 0.06177271902561188, 0.07108131051063538, 0.09510631114244461, 0.021073024719953537, -0.0047914981842041016, -0.04334438592195511, -0.04612504690885544, 0.0018095794366672635, -0.026546403765678406, -0.0667160302400589, 0.1974586695432663, 0.04675144702196121, 0.010154150426387787, 0.007668330799788237, 0.20796644687652588, -0.0011423988034948707, -0.06150508299469948, -0.13681039214134216, 0.12683981657028198, -0.01492121908813715, 0.02329992689192295, 0.03190223127603531, -0.11506848782300949, 0.03890112414956093, 0.1578821837902069, 0.09118369221687317, 0.05659587308764458, 0.014918140135705471, 0.04548247903585434, 0.024267835542559624, -0.038902319967746735, 0.0634356364607811, 0.019633876159787178, 0.24492648243904114, -0.05527537688612938, 0.09175272285938263, -0.009609147906303406, 0.0002612127864267677, -0.016872668638825417, 0.10813684016466141, -0.04996703937649727, 0.02409237064421177, -0.08026580512523651, 0.09521941840648651, -0.05879295989871025, -0.2591480314731598, -0.0035744085907936096, -0.07637912780046463, -0.1455780416727066, -0.012603763490915298, 0.032591015100479126, -0.042312346398830414, 0.0448402464389801, 0.029040327295660973, -0.026280414313077927, 0.17948640882968903, 0.0051181926392018795, -0.07077246159315109, -0.08397092670202255, 0.07344939559698105, -0.019259540364146233, 0.2813663184642792, -0.011578366160392761, 0.06015189737081528, 0.08854777365922928, -0.016691237688064575, -0.1260497123003006, 0.013868902809917927, 0.08703009784221649, -0.058953944593667984, 0.054035987704992294, 0.16506487131118774, -0.030442988499999046, 
0.14729228615760803, 0.030748559162020683, -0.020267583429813385, 0.07516588270664215, 0.06994190067052841, 0.03563776984810829, -0.09907491505146027, 0.0754714384675026, -0.09493082016706467, 0.13454395532608032, 0.10175316035747528, -0.007746824063360691, -0.00867911335080862, -0.05876491963863373, 0.06345739960670471, -0.032178834080696106, 0.1542958915233612, -0.017333969473838806, -0.17605671286582947, 0.04672231525182724, 0.032285332679748535, 0.0668041780591011, -0.2487446516752243, -0.056451473385095596, 0.1060074046254158, -0.05023961514234543, 0.01825874298810959, 0.08726558089256287, 0.0442994050681591, 0.02579561620950699, -0.05395899713039398, -0.13021117448806763, 0.017343800514936447, 0.12649615108966827, -0.08957048505544662, -0.03273116424679756 ]
15713169d755572f89d9513af0adcd2f48a98d71
# Dataset Card for Evaluation run of Steelskull/VerB-Etheria-55b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Steelskull/VerB-Etheria-55b](https://huggingface.co/Steelskull/VerB-Etheria-55b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Steelskull__VerB-Etheria-55b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T17:11:57.529002](https://huggingface.co/datasets/open-llm-leaderboard/details_Steelskull__VerB-Etheria-55b/blob/main/results_2024-01-25T17-11-57.529002.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7273568607295041, "acc_stderr": 0.029263863644913724, "acc_norm": 0.7377743224385701, "acc_norm_stderr": 0.02981943493187247, "mc1": 0.3990208078335373, "mc1_stderr": 0.01714282572849677, "mc2": 0.575213422471882, "mc2_stderr": 0.01606436002486393 }, "harness|arc:challenge|25": { "acc": 0.6279863481228669, "acc_stderr": 0.014124597881844461, "acc_norm": 0.659556313993174, "acc_norm_stderr": 0.013847460518892973 }, "harness|hellaswag|10": { "acc": 0.6437960565624378, "acc_stderr": 0.004778978031389639, "acc_norm": 0.8147779326827326, "acc_norm_stderr": 0.003876836709461124 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6666666666666666, "acc_stderr": 0.04072314811876837, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.04072314811876837 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8618421052631579, "acc_stderr": 0.028081042939576552, "acc_norm": 0.8618421052631579, "acc_norm_stderr": 0.028081042939576552 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.77, "acc_stderr": 0.04229525846816505, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7735849056603774, "acc_stderr": 0.025757559893106723, "acc_norm": 0.7735849056603774, "acc_norm_stderr": 0.025757559893106723 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8611111111111112, "acc_stderr": 0.028919802956134905, "acc_norm": 0.8611111111111112, "acc_norm_stderr": 0.028919802956134905 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.55, "acc_stderr": 0.04999999999999999, "acc_norm": 0.55, "acc_norm_stderr": 0.04999999999999999 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, 
"acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6994219653179191, "acc_stderr": 0.0349610148119118, "acc_norm": 0.6994219653179191, "acc_norm_stderr": 0.0349610148119118 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.47058823529411764, "acc_stderr": 0.04966570903978529, "acc_norm": 0.47058823529411764, "acc_norm_stderr": 0.04966570903978529 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.8, "acc_stderr": 0.04020151261036845, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.7446808510638298, "acc_stderr": 0.02850485647051426, "acc_norm": 0.7446808510638298, "acc_norm_stderr": 0.02850485647051426 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5526315789473685, "acc_stderr": 0.04677473004491199, "acc_norm": 0.5526315789473685, "acc_norm_stderr": 0.04677473004491199 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.7448275862068966, "acc_stderr": 0.03632984052707842, "acc_norm": 0.7448275862068966, "acc_norm_stderr": 0.03632984052707842 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.5582010582010583, "acc_stderr": 0.025576257061253833, "acc_norm": 0.5582010582010583, "acc_norm_stderr": 0.025576257061253833 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5, "acc_stderr": 0.04472135954999579, "acc_norm": 0.5, "acc_norm_stderr": 0.04472135954999579 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.9, "acc_stderr": 0.017066403719657255, "acc_norm": 0.9, "acc_norm_stderr": 0.017066403719657255 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5911330049261084, 
"acc_stderr": 0.03459058815883232, "acc_norm": 0.5911330049261084, "acc_norm_stderr": 0.03459058815883232 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.81, "acc_stderr": 0.03942772444036624, "acc_norm": 0.81, "acc_norm_stderr": 0.03942772444036624 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8303030303030303, "acc_stderr": 0.029311188674983116, "acc_norm": 0.8303030303030303, "acc_norm_stderr": 0.029311188674983116 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.9292929292929293, "acc_stderr": 0.018263105420199505, "acc_norm": 0.9292929292929293, "acc_norm_stderr": 0.018263105420199505 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9792746113989638, "acc_stderr": 0.010281417011909032, "acc_norm": 0.9792746113989638, "acc_norm_stderr": 0.010281417011909032 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.782051282051282, "acc_stderr": 0.020932445774463196, "acc_norm": 0.782051282051282, "acc_norm_stderr": 0.020932445774463196 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3962962962962963, "acc_stderr": 0.029822619458533997, "acc_norm": 0.3962962962962963, "acc_norm_stderr": 0.029822619458533997 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.8529411764705882, "acc_stderr": 0.023005459446673957, "acc_norm": 0.8529411764705882, "acc_norm_stderr": 0.023005459446673957 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.4768211920529801, "acc_stderr": 0.04078093859163083, "acc_norm": 0.4768211920529801, "acc_norm_stderr": 0.04078093859163083 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9064220183486239, "acc_stderr": 0.01248684182460197, "acc_norm": 0.9064220183486239, "acc_norm_stderr": 0.01248684182460197 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6527777777777778, "acc_stderr": 0.032468872436376486, "acc_norm": 0.6527777777777778, "acc_norm_stderr": 0.032468872436376486 
}, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9019607843137255, "acc_stderr": 0.020871118455552104, "acc_norm": 0.9019607843137255, "acc_norm_stderr": 0.020871118455552104 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8945147679324894, "acc_stderr": 0.019995560723758535, "acc_norm": 0.8945147679324894, "acc_norm_stderr": 0.019995560723758535 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.8026905829596412, "acc_stderr": 0.02670985334496796, "acc_norm": 0.8026905829596412, "acc_norm_stderr": 0.02670985334496796 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8396946564885496, "acc_stderr": 0.03217829420744631, "acc_norm": 0.8396946564885496, "acc_norm_stderr": 0.03217829420744631 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8677685950413223, "acc_stderr": 0.03092278832044579, "acc_norm": 0.8677685950413223, "acc_norm_stderr": 0.03092278832044579 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8611111111111112, "acc_stderr": 0.0334327006286962, "acc_norm": 0.8611111111111112, "acc_norm_stderr": 0.0334327006286962 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8834355828220859, "acc_stderr": 0.025212327210507104, "acc_norm": 0.8834355828220859, "acc_norm_stderr": 0.025212327210507104 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.6071428571428571, "acc_stderr": 0.04635550135609976, "acc_norm": 0.6071428571428571, "acc_norm_stderr": 0.04635550135609976 }, "harness|hendrycksTest-management|5": { "acc": 0.8349514563106796, "acc_stderr": 0.03675668832233188, "acc_norm": 0.8349514563106796, "acc_norm_stderr": 0.03675668832233188 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9102564102564102, "acc_stderr": 0.018724301741941646, "acc_norm": 0.9102564102564102, "acc_norm_stderr": 0.018724301741941646 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.83, "acc_stderr": 0.03775251680686371, "acc_norm": 0.83, "acc_norm_stderr": 0.03775251680686371 }, 
"harness|hendrycksTest-miscellaneous|5": { "acc": 0.8876117496807152, "acc_stderr": 0.011294541351216533, "acc_norm": 0.8876117496807152, "acc_norm_stderr": 0.011294541351216533 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.8179190751445087, "acc_stderr": 0.020776761102512965, "acc_norm": 0.8179190751445087, "acc_norm_stderr": 0.020776761102512965 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.6324022346368715, "acc_stderr": 0.016125543823552944, "acc_norm": 0.6324022346368715, "acc_norm_stderr": 0.016125543823552944 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8169934640522876, "acc_stderr": 0.02214076751288097, "acc_norm": 0.8169934640522876, "acc_norm_stderr": 0.02214076751288097 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.8167202572347267, "acc_stderr": 0.021974198848265823, "acc_norm": 0.8167202572347267, "acc_norm_stderr": 0.021974198848265823 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8240740740740741, "acc_stderr": 0.021185893615225153, "acc_norm": 0.8240740740740741, "acc_norm_stderr": 0.021185893615225153 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.624113475177305, "acc_stderr": 0.028893955412115882, "acc_norm": 0.624113475177305, "acc_norm_stderr": 0.028893955412115882 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5684485006518905, "acc_stderr": 0.012650007999463902, "acc_norm": 0.5684485006518905, "acc_norm_stderr": 0.012650007999463902 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7977941176470589, "acc_stderr": 0.024398192986654924, "acc_norm": 0.7977941176470589, "acc_norm_stderr": 0.024398192986654924 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7875816993464052, "acc_stderr": 0.016547148636203147, "acc_norm": 0.7875816993464052, "acc_norm_stderr": 0.016547148636203147 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7272727272727273, "acc_stderr": 0.04265792110940588, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 
0.04265792110940588 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8204081632653061, "acc_stderr": 0.024573293589585637, "acc_norm": 0.8204081632653061, "acc_norm_stderr": 0.024573293589585637 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8805970149253731, "acc_stderr": 0.02292879327721974, "acc_norm": 0.8805970149253731, "acc_norm_stderr": 0.02292879327721974 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.88, "acc_stderr": 0.03265986323710906, "acc_norm": 0.88, "acc_norm_stderr": 0.03265986323710906 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.038695433234721015, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.038695433234721015 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8538011695906432, "acc_stderr": 0.027097290118070827, "acc_norm": 0.8538011695906432, "acc_norm_stderr": 0.027097290118070827 }, "harness|truthfulqa:mc|0": { "mc1": 0.3990208078335373, "mc1_stderr": 0.01714282572849677, "mc2": 0.575213422471882, "mc2_stderr": 0.01606436002486393 }, "harness|winogrande|5": { "acc": 0.7545382794001578, "acc_stderr": 0.012095272937183653 }, "harness|gsm8k|5": { "acc": 0.2880970432145565, "acc_stderr": 0.012474469737197923 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_Steelskull__VerB-Etheria-55b
[ "region:us" ]
2024-01-25T17:14:11+00:00
{"pretty_name": "Evaluation run of Steelskull/VerB-Etheria-55b", "dataset_summary": "Dataset automatically created during the evaluation run of model [Steelskull/VerB-Etheria-55b](https://huggingface.co/Steelskull/VerB-Etheria-55b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Steelskull__VerB-Etheria-55b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T17:11:57.529002](https://huggingface.co/datasets/open-llm-leaderboard/details_Steelskull__VerB-Etheria-55b/blob/main/results_2024-01-25T17-11-57.529002.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7273568607295041,\n \"acc_stderr\": 0.029263863644913724,\n \"acc_norm\": 0.7377743224385701,\n \"acc_norm_stderr\": 0.02981943493187247,\n \"mc1\": 0.3990208078335373,\n \"mc1_stderr\": 0.01714282572849677,\n \"mc2\": 0.575213422471882,\n \"mc2_stderr\": 0.01606436002486393\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6279863481228669,\n \"acc_stderr\": 0.014124597881844461,\n \"acc_norm\": 0.659556313993174,\n \"acc_norm_stderr\": 0.013847460518892973\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6437960565624378,\n \"acc_stderr\": 0.004778978031389639,\n \"acc_norm\": 0.8147779326827326,\n \"acc_norm_stderr\": 0.003876836709461124\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.04072314811876837,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.04072314811876837\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.8618421052631579,\n \"acc_stderr\": 0.028081042939576552,\n \"acc_norm\": 0.8618421052631579,\n \"acc_norm_stderr\": 0.028081042939576552\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7735849056603774,\n \"acc_stderr\": 0.025757559893106723,\n \"acc_norm\": 0.7735849056603774,\n \"acc_norm_stderr\": 0.025757559893106723\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8611111111111112,\n \"acc_stderr\": 0.028919802956134905,\n \"acc_norm\": 0.8611111111111112,\n \"acc_norm_stderr\": 0.028919802956134905\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.55,\n 
\"acc_stderr\": 0.04999999999999999,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.04999999999999999\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6994219653179191,\n \"acc_stderr\": 0.0349610148119118,\n \"acc_norm\": 0.6994219653179191,\n \"acc_norm_stderr\": 0.0349610148119118\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.47058823529411764,\n \"acc_stderr\": 0.04966570903978529,\n \"acc_norm\": 0.47058823529411764,\n \"acc_norm_stderr\": 0.04966570903978529\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.7446808510638298,\n \"acc_stderr\": 0.02850485647051426,\n \"acc_norm\": 0.7446808510638298,\n \"acc_norm_stderr\": 0.02850485647051426\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5526315789473685,\n \"acc_stderr\": 0.04677473004491199,\n \"acc_norm\": 0.5526315789473685,\n \"acc_norm_stderr\": 0.04677473004491199\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.7448275862068966,\n \"acc_stderr\": 0.03632984052707842,\n \"acc_norm\": 0.7448275862068966,\n \"acc_norm_stderr\": 0.03632984052707842\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.5582010582010583,\n \"acc_stderr\": 0.025576257061253833,\n \"acc_norm\": 0.5582010582010583,\n \"acc_norm_stderr\": 0.025576257061253833\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04472135954999579,\n \"acc_norm\": 
0.5,\n \"acc_norm_stderr\": 0.04472135954999579\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.017066403719657255,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.017066403719657255\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5911330049261084,\n \"acc_stderr\": 0.03459058815883232,\n \"acc_norm\": 0.5911330049261084,\n \"acc_norm_stderr\": 0.03459058815883232\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.03942772444036624,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.03942772444036624\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8303030303030303,\n \"acc_stderr\": 0.029311188674983116,\n \"acc_norm\": 0.8303030303030303,\n \"acc_norm_stderr\": 0.029311188674983116\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.9292929292929293,\n \"acc_stderr\": 0.018263105420199505,\n \"acc_norm\": 0.9292929292929293,\n \"acc_norm_stderr\": 0.018263105420199505\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9792746113989638,\n \"acc_stderr\": 0.010281417011909032,\n \"acc_norm\": 0.9792746113989638,\n \"acc_norm_stderr\": 0.010281417011909032\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.782051282051282,\n \"acc_stderr\": 0.020932445774463196,\n \"acc_norm\": 0.782051282051282,\n \"acc_norm_stderr\": 0.020932445774463196\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3962962962962963,\n \"acc_stderr\": 0.029822619458533997,\n \"acc_norm\": 0.3962962962962963,\n \"acc_norm_stderr\": 0.029822619458533997\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.8529411764705882,\n \"acc_stderr\": 
0.023005459446673957,\n \"acc_norm\": 0.8529411764705882,\n \"acc_norm_stderr\": 0.023005459446673957\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.4768211920529801,\n \"acc_stderr\": 0.04078093859163083,\n \"acc_norm\": 0.4768211920529801,\n \"acc_norm_stderr\": 0.04078093859163083\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9064220183486239,\n \"acc_stderr\": 0.01248684182460197,\n \"acc_norm\": 0.9064220183486239,\n \"acc_norm_stderr\": 0.01248684182460197\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6527777777777778,\n \"acc_stderr\": 0.032468872436376486,\n \"acc_norm\": 0.6527777777777778,\n \"acc_norm_stderr\": 0.032468872436376486\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9019607843137255,\n \"acc_stderr\": 0.020871118455552104,\n \"acc_norm\": 0.9019607843137255,\n \"acc_norm_stderr\": 0.020871118455552104\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8945147679324894,\n \"acc_stderr\": 0.019995560723758535,\n \"acc_norm\": 0.8945147679324894,\n \"acc_norm_stderr\": 0.019995560723758535\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.8026905829596412,\n \"acc_stderr\": 0.02670985334496796,\n \"acc_norm\": 0.8026905829596412,\n \"acc_norm_stderr\": 0.02670985334496796\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8396946564885496,\n \"acc_stderr\": 0.03217829420744631,\n \"acc_norm\": 0.8396946564885496,\n \"acc_norm_stderr\": 0.03217829420744631\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8677685950413223,\n \"acc_stderr\": 0.03092278832044579,\n \"acc_norm\": 0.8677685950413223,\n \"acc_norm_stderr\": 0.03092278832044579\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8611111111111112,\n \"acc_stderr\": 0.0334327006286962,\n \"acc_norm\": 0.8611111111111112,\n \"acc_norm_stderr\": 0.0334327006286962\n },\n 
\"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8834355828220859,\n \"acc_stderr\": 0.025212327210507104,\n \"acc_norm\": 0.8834355828220859,\n \"acc_norm_stderr\": 0.025212327210507104\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.6071428571428571,\n \"acc_stderr\": 0.04635550135609976,\n \"acc_norm\": 0.6071428571428571,\n \"acc_norm_stderr\": 0.04635550135609976\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8349514563106796,\n \"acc_stderr\": 0.03675668832233188,\n \"acc_norm\": 0.8349514563106796,\n \"acc_norm_stderr\": 0.03675668832233188\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9102564102564102,\n \"acc_stderr\": 0.018724301741941646,\n \"acc_norm\": 0.9102564102564102,\n \"acc_norm_stderr\": 0.018724301741941646\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.03775251680686371,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.03775251680686371\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8876117496807152,\n \"acc_stderr\": 0.011294541351216533,\n \"acc_norm\": 0.8876117496807152,\n \"acc_norm_stderr\": 0.011294541351216533\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.8179190751445087,\n \"acc_stderr\": 0.020776761102512965,\n \"acc_norm\": 0.8179190751445087,\n \"acc_norm_stderr\": 0.020776761102512965\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.6324022346368715,\n \"acc_stderr\": 0.016125543823552944,\n \"acc_norm\": 0.6324022346368715,\n \"acc_norm_stderr\": 0.016125543823552944\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8169934640522876,\n \"acc_stderr\": 0.02214076751288097,\n \"acc_norm\": 0.8169934640522876,\n \"acc_norm_stderr\": 0.02214076751288097\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.8167202572347267,\n \"acc_stderr\": 0.021974198848265823,\n \"acc_norm\": 0.8167202572347267,\n \"acc_norm_stderr\": 0.021974198848265823\n },\n 
\"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8240740740740741,\n \"acc_stderr\": 0.021185893615225153,\n \"acc_norm\": 0.8240740740740741,\n \"acc_norm_stderr\": 0.021185893615225153\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.624113475177305,\n \"acc_stderr\": 0.028893955412115882,\n \"acc_norm\": 0.624113475177305,\n \"acc_norm_stderr\": 0.028893955412115882\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5684485006518905,\n \"acc_stderr\": 0.012650007999463902,\n \"acc_norm\": 0.5684485006518905,\n \"acc_norm_stderr\": 0.012650007999463902\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7977941176470589,\n \"acc_stderr\": 0.024398192986654924,\n \"acc_norm\": 0.7977941176470589,\n \"acc_norm_stderr\": 0.024398192986654924\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7875816993464052,\n \"acc_stderr\": 0.016547148636203147,\n \"acc_norm\": 0.7875816993464052,\n \"acc_norm_stderr\": 0.016547148636203147\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.04265792110940588,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.04265792110940588\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8204081632653061,\n \"acc_stderr\": 0.024573293589585637,\n \"acc_norm\": 0.8204081632653061,\n \"acc_norm_stderr\": 0.024573293589585637\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8805970149253731,\n \"acc_stderr\": 0.02292879327721974,\n \"acc_norm\": 0.8805970149253731,\n \"acc_norm_stderr\": 0.02292879327721974\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.88,\n \"acc_stderr\": 0.03265986323710906,\n \"acc_norm\": 0.88,\n \"acc_norm_stderr\": 0.03265986323710906\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.038695433234721015,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 
0.038695433234721015\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8538011695906432,\n \"acc_stderr\": 0.027097290118070827,\n \"acc_norm\": 0.8538011695906432,\n \"acc_norm_stderr\": 0.027097290118070827\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3990208078335373,\n \"mc1_stderr\": 0.01714282572849677,\n \"mc2\": 0.575213422471882,\n \"mc2_stderr\": 0.01606436002486393\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7545382794001578,\n \"acc_stderr\": 0.012095272937183653\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.2880970432145565,\n \"acc_stderr\": 0.012474469737197923\n }\n}\n```", "repo_url": "https://huggingface.co/Steelskull/VerB-Etheria-55b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|arc:challenge|25_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|gsm8k|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hellaswag|10_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T17-11-57.529002.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T17-11-57.529002.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T17-11-57.529002.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T17-11-57.529002.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T17-11-57.529002.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T17-11-57.529002.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T17-11-57.529002.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T17-11-57.529002.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["**/details_harness|winogrande|5_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T17-11-57.529002.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T17_11_57.529002", "path": ["results_2024-01-25T17-11-57.529002.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T17-11-57.529002.parquet"]}]}]}
2024-01-25T17:14:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Steelskull/VerB-Etheria-55b Dataset automatically created during the evaluation run of model Steelskull/VerB-Etheria-55b on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T17:11:57.529002(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Steelskull/VerB-Etheria-55b\n\n\n\nDataset automatically created during the evaluation run of model Steelskull/VerB-Etheria-55b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T17:11:57.529002(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Steelskull/VerB-Etheria-55b\n\n\n\nDataset automatically created during the evaluation run of model Steelskull/VerB-Etheria-55b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T17:11:57.529002(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 185, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Steelskull/VerB-Etheria-55b\n\n\n\nDataset automatically created during the evaluation run of model Steelskull/VerB-Etheria-55b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T17:11:57.529002(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.05993993580341339, 0.2019166648387909, -0.005041934084147215, 0.04048515111207962, 0.08233936131000519, -0.012669704854488373, 0.04660375416278839, 0.09738259762525558, 0.025500671938061714, 0.19160984456539154, -0.02831924706697464, 0.11591096967458725, 0.06624524295330048, 0.13179880380630493, 0.026214340701699257, -0.12625420093536377, 0.02683744952082634, -0.08671785145998001, 0.10556101053953171, 0.08220460265874863, 0.05772627145051956, -0.077614426612854, 0.0653359666466713, -0.026294879615306854, 0.02720254473388195, -0.006159204989671707, -0.0787297710776329, -0.018963031470775604, 0.1038329005241394, 0.10337856411933899, 0.04062643647193909, -0.016155878081917763, 0.023123586550354958, -0.26050442457199097, 0.01655658334493637, 0.09137412905693054, -0.007314336020499468, 0.03368579223752022, 0.11964471638202667, -0.07882516086101532, 0.07195622473955154, -0.02561456337571144, 0.08772795647382736, 0.05091210827231407, -0.10371910780668259, -0.1563771516084671, -0.14839422702789307, 0.01951891928911209, 0.05494281277060509, 0.039235055446624756, -0.030240913853049278, 0.16466869413852692, -0.07227395474910736, 0.04481330141425133, 0.12406910955905914, -0.10270864516496658, -0.014875897206366062, 0.040740881115198135, 0.009357010014355183, 0.08269928395748138, -0.08370614796876907, -0.025283504277467728, 0.02923072688281536, 0.05873517692089081, -0.0026322496123611927, 0.016997769474983215, -0.031027430668473244, 0.014824071899056435, -0.13517659902572632, -0.11815309524536133, 0.14807923138141632, -0.0006617859471589327, -0.052852075546979904, -0.18111692368984222, -0.008438007906079292, 0.026699978858232498, 0.004803041461855173, 0.0016754186945036054, -0.001911542727611959, -0.02299412712454796, 0.09361271560192108, -0.005110905505716801, -0.10436902195215225, -0.03654827922582626, -0.007479509338736534, 0.08176663517951965, 0.022587308660149574, -0.013327734544873238, 0.015172116458415985, 0.11982758343219757, 0.008433403447270393, 
-0.06097763031721115, -0.07174187898635864, -0.057728495448827744, -0.1330001950263977, -0.044955622404813766, 0.011607480235397816, -0.06864374130964279, 0.031243156641721725, 0.23858493566513062, -0.00412772735580802, 0.029901858419179916, -0.11227768659591675, 0.014001403003931046, 0.11374977231025696, 0.054825037717819214, -0.0828428566455841, -0.03948841989040375, -0.04255988448858261, 0.03176073357462883, 0.03247714415192604, -0.014836333692073822, 0.004964136052876711, 0.053507376462221146, 0.027713477611541748, 0.12052139639854431, 0.11553354561328888, 0.03559811785817146, -0.08132375031709671, -0.017290089279413223, 0.23364530503749847, -0.1412995457649231, -0.01645054668188095, 0.019639842212200165, -0.03034891001880169, -0.11062046140432358, 0.06533369421958923, -0.004755754955112934, -0.05442533642053604, 0.1334211230278015, -0.05224423110485077, -0.08496689051389694, -0.07137850672006607, -0.06341808289289474, 0.05164914205670357, 0.01684487983584404, -0.049412332475185394, -0.06626492738723755, -0.10735198855400085, -0.0823136419057846, 0.02874569594860077, -0.06091899797320366, -0.03385038301348686, 0.02649042010307312, 0.005196709651499987, -0.009733869694173336, -0.017467135563492775, 0.12157337367534637, -0.05487138405442238, 0.039368536323308945, -0.015377040952444077, 0.011590512469410896, 0.09524275362491608, 0.042238954454660416, -0.11503822356462479, 0.07205288857221603, -0.13246704638004303, 0.09783963859081268, -0.11982809007167816, -0.008700107224285603, -0.12027289718389511, -0.005664135795086622, -0.045911893248558044, 0.04471127688884735, -0.02879541739821434, 0.0855686143040657, -0.2203720510005951, -0.00799129344522953, 0.14717607200145721, -0.13171280920505524, -0.07568295300006866, 0.09432920813560486, -0.04441910609602928, 0.06565981358289719, 0.0494111143052578, 0.11215557157993317, 0.10233529657125473, -0.0860663503408432, -0.09181302785873413, -0.05784114450216293, -0.029968706890940666, 0.1589067280292511, 0.06688038259744644, 
-0.08830025792121887, 0.10372044146060944, 0.05511517822742462, -0.028721056878566742, -0.07960645109415054, 0.006631325464695692, -0.06870461255311966, -0.02416326478123665, -0.06870829313993454, -0.04373316466808319, -0.00007886933599365875, -0.07305067032575607, -0.014593703672289848, -0.08888666331768036, 0.0020867781713604927, 0.10144813358783722, -0.02471233531832695, 0.006678916048258543, -0.06761064380407333, 0.03866159915924072, 0.012093854136765003, 0.011955389752984047, -0.2148798555135727, -0.09784028679132462, 0.03329295665025711, -0.20158353447914124, 0.058239907026290894, 0.04396068677306175, 0.011095276102423668, 0.0438663475215435, -0.008055269718170166, 0.026227619498968124, 0.014741661958396435, -0.013011171482503414, -0.010185911320149899, -0.156788632273674, -0.058852892369031906, -0.08718673139810562, 0.09612872451543808, -0.13671743869781494, -0.010929080657660961, 0.07020331174135208, 0.14869573712348938, 0.0224823746830225, -0.07794676721096039, 0.05270500108599663, 0.010374454781413078, -0.03861742094159126, -0.04907682538032532, -0.0034148774575442076, -0.024136750027537346, 0.03528578579425812, 0.038685400038957596, -0.20102904736995697, -0.0833534449338913, 0.06341803073883057, 0.13441556692123413, -0.07762358337640762, -0.1017734706401825, -0.061381686478853226, -0.06204405054450035, -0.09016639739274979, -0.07868223637342453, 0.07704248279333115, 0.08561854809522629, 0.03612370789051056, -0.07468190789222717, -0.05384453758597374, 0.002585281152278185, 0.0581698939204216, -0.06351561099290848, 0.11237206310033798, 0.06720224022865295, -0.07509390264749527, 0.11059658229351044, -0.0640997514128685, 0.1036827489733696, 0.08636510372161865, 0.03378238528966904, -0.10114937275648117, 0.0029438575729727745, 0.0649418830871582, 0.04328615218400955, 0.08767086267471313, -0.06356024742126465, 0.03486844524741173, 0.08251246064901352, -0.014616799540817738, 0.040511514991521835, -0.06387649476528168, 0.023342207074165344, 0.029218042269349098, 
0.007699516601860523, 0.003572777146473527, 0.014421146363019943, 0.020225713029503822, 0.08304398506879807, 0.02418961562216282, 0.12222018092870712, -0.026735443621873856, -0.052820079028606415, -0.10818202793598175, 0.15157543122768402, -0.07894109189510345, -0.2613730728626251, -0.1719074547290802, -0.023167135193943977, -0.016726620495319366, -0.016680097207427025, 0.05971866846084595, -0.01288119051605463, -0.11509358882904053, -0.1079060286283493, 0.06762831658124924, 0.037557728588581085, -0.1318940371274948, -0.050985049456357956, 0.05460720509290695, -0.00920860469341278, -0.16756504774093628, 0.04438261687755585, 0.04405326768755913, -0.04770868271589279, -0.008002407848834991, 0.07350753247737885, 0.10799084603786469, 0.09463276714086533, 0.08703984320163727, -0.026484550908207893, -0.007854873314499855, 0.1670975685119629, -0.1111607700586319, 0.029553012922406197, 0.09837818890810013, -0.06816625595092773, 0.06954116374254227, 0.16845329105854034, 0.014470428228378296, -0.08019280433654785, 0.056286074221134186, 0.1004389151930809, -0.06894512474536896, -0.242213636636734, -0.12553566694259644, -0.034607816487550735, 0.013071874156594276, 0.10688649863004684, 0.06659885495901108, 0.01385900005698204, 0.013815687038004398, -0.12941502034664154, -0.02657204121351242, -0.04411205276846886, 0.06799127161502838, 0.05875823274254799, 0.0008353322045877576, 0.046248555183410645, -0.04305264353752136, 0.01820361241698265, 0.11666340380907059, 0.05273369327187538, 0.15128988027572632, -0.04882436245679855, 0.18691577017307281, 0.09082010388374329, 0.07629856467247009, -0.036905162036418915, 0.045117516070604324, -0.02464919351041317, 0.060477711260318756, -0.011707911267876625, -0.10890323668718338, -0.035280946642160416, 0.10657177865505219, 0.04255347326397896, -0.07383948564529419, 0.040029242634773254, -0.0937129408121109, 0.038221508264541626, 0.19542023539543152, -0.03266754746437073, -0.12668342888355255, -0.06349937617778778, 0.063713937997818, 
-0.03431323170661926, -0.08484398573637009, -0.025563102215528488, 0.08430790901184082, -0.1421712189912796, 0.013309444300830364, -0.038976527750492096, 0.07295867800712585, -0.12336644530296326, -0.02707871049642563, -0.02710285596549511, 0.03772551566362381, -0.00630926201120019, 0.11504649370908737, -0.1192566379904747, 0.09050574153661728, -0.0009206575341522694, 0.027720024809241295, -0.10848604887723923, 0.05448267236351967, -0.03117368370294571, -0.05973953381180763, 0.13607972860336304, -0.011655396781861782, -0.07138493657112122, -0.04963209852576256, -0.0982975959777832, -0.004846504889428616, 0.05114708095788956, -0.10048680752515793, 0.1068912073969841, 0.023722827434539795, -0.016855956986546516, -0.024987097829580307, -0.015761302784085274, -0.11085387319326401, -0.24643802642822266, 0.11918918043375015, -0.14570429921150208, 0.028529394418001175, -0.07007630169391632, -0.05727285519242287, -0.04236990585923195, 0.15058428049087524, -0.10312432050704956, -0.05929087847471237, -0.11009091883897781, -0.014833367429673672, 0.18753419816493988, -0.052452925592660904, 0.06035682186484337, -0.045906029641628265, 0.19213855266571045, -0.024220366030931473, -0.04192081466317177, -0.008237281814217567, -0.076906718313694, -0.19315384328365326, -0.04480781406164169, 0.10616597533226013, 0.07954224944114685, 0.01023534033447504, -0.007459556683897972, 0.017584681510925293, 0.007419420871883631, -0.10198787599802017, 0.029406102374196053, 0.10199887305498123, 0.12213214486837387, 0.05332120507955551, -0.04164596274495125, -0.13221123814582825, -0.0952020138502121, -0.10423201322555542, 0.039270538836717606, 0.174608051776886, -0.06610110402107239, 0.16938456892967224, 0.16582836210727692, -0.09311306476593018, -0.20126862823963165, -0.050808731466531754, 0.019111786037683487, -0.015287453308701515, 0.14156664907932281, -0.19807885587215424, 0.06674068421125412, 0.07023543119430542, -0.02891087345778942, 0.11324901133775711, -0.264085590839386, 
-0.14190925657749176, 0.03390635922551155, 0.04070647433400154, -0.2345310002565384, -0.17017415165901184, -0.1049431562423706, -0.02317737601697445, -0.1541469842195511, 0.13415293395519257, -0.007573507260531187, 0.02710386924445629, -0.022579651325941086, 0.0973319411277771, 0.053701408207416534, -0.07799381017684937, 0.12686562538146973, -0.014031009748578072, 0.022489821538329124, -0.0989726334810257, -0.04630448669195175, 0.006931600160896778, -0.042884912341833115, 0.08669926226139069, 0.009367045015096664, 0.054344989359378815, -0.07440593093633652, -0.04143630340695381, -0.07531504333019257, 0.05846736207604408, -0.08161211013793945, -0.05575777590274811, -0.07256735116243362, 0.0846676379442215, 0.07435693591833115, -0.016050295904278755, 0.008166681043803692, -0.04935121908783913, 0.05945777893066406, 0.18664225935935974, 0.11054675281047821, 0.05537422001361847, -0.09977234154939651, -0.028586922213435173, -0.013616248033940792, -0.005248313304036856, -0.10036583989858627, 0.05036349594593048, 0.08369527012109756, 0.052583057433366776, 0.08922867476940155, -0.02230079099535942, -0.18531234562397003, 0.0005652348045259714, 0.07672591507434845, -0.08886663615703583, -0.19229832291603088, 0.04561099782586098, 0.143873393535614, -0.15783219039440155, -0.06984441727399826, 0.06899701058864594, 0.024939024820923805, -0.039707161486148834, -0.0009199976921081543, 0.07692642509937286, 0.05262981727719307, 0.10623914748430252, 0.018119197338819504, 0.05103924870491028, -0.07047224789857864, 0.08862122148275375, 0.12476752698421478, -0.11570128798484802, 0.006495092995464802, 0.031730521470308304, -0.057589348405599594, -0.07317059487104416, -0.0005291957641020417, 0.01837984099984169, 0.02396215684711933, -0.028873978182673454, 0.01628146693110466, -0.03357655182480812, 0.05613582208752632, 0.13333715498447418, 0.001362182549200952, 0.05508234724402428, 0.015206815674901009, 0.00483934348449111, -0.05499589815735817, 0.10640940070152283, 0.031466562300920486, 
0.04704704135656357, -0.04041358456015587, 0.033328428864479065, 0.020961301401257515, -0.0323941633105278, 0.018248513340950012, -0.0460604727268219, -0.07657621800899506, -0.0005190405063331127, -0.15612226724624634, 0.05409819632768631, -0.08052986115217209, 0.009950006380677223, -0.006339554209262133, -0.028566015884280205, -0.00782476831227541, 0.0077254134230315685, -0.08073199540376663, -0.03967278450727463, -0.04640694335103035, 0.1287294626235962, -0.1980353146791458, 0.005400645546615124, 0.09607696533203125, -0.07144688814878464, 0.0700841173529625, -0.010149519890546799, -0.022739684209227562, 0.03476707637310028, -0.11390142887830734, 0.012774167582392693, -0.02819366380572319, 0.05976315587759018, 0.009180315770208836, -0.12769261002540588, -0.017311351373791695, -0.00024783704429864883, -0.06512182950973511, -0.007188429124653339, 0.041463397443294525, -0.14965617656707764, 0.06618931889533997, 0.08813152462244034, -0.0620637983083725, -0.04451163113117218, 0.05275556445121765, 0.04785924032330513, 0.002737760776653886, 0.10683530569076538, -0.004873305093497038, 0.02088685892522335, -0.1532382369041443, -0.043566636741161346, 0.009508170187473297, 0.013677256181836128, 0.04841238632798195, 0.01752319559454918, 0.021679051220417023, 0.008592222817242146, 0.24124404788017273, -0.019939927384257317, 0.026717068627476692, 0.02545613795518875, -0.00687185674905777, -0.02840491570532322, 0.030351994559168816, 0.012381115928292274, -0.01802544668316841, 0.03209136426448822, 0.008256624452769756, -0.04534380882978439, -0.0753699392080307, -0.031021151691675186, 0.0779135525226593, 0.14238348603248596, 0.1620209664106369, -0.043835896998643875, 0.0813242644071579, -0.16917438805103302, -0.07126112282276154, 0.00576644204556942, -0.055271778255701065, 0.038296110928058624, -0.07921620458364487, 0.08180835098028183, 0.08810514211654663, -0.09421877562999725, 0.15262766182422638, -0.05280067026615143, -0.03339012712240219, -0.035119298845529556, 
-0.16507869958877563, -0.03961082920432091, 0.035700369626283646, -0.00004545789124676958, -0.08370057493448257, 0.11567560583353043, 0.13197897374629974, -0.012739591300487518, -0.0009190989658236504, 0.09039965271949768, -0.045130666345357895, -0.057543136179447174, -0.03346503898501396, 0.004787167999893427, 0.0035123273264616728, 0.010185730643570423, 0.07793477177619934, 0.01618214324116707, 0.059982914477586746, 0.07181897759437561, 0.09430605918169022, 0.019079826772212982, -0.0051787844859063625, -0.041525255888700485, -0.04569191113114357, 0.001163301756605506, -0.026158615946769714, -0.06690122187137604, 0.19965636730194092, 0.046493493020534515, 0.010775619186460972, 0.006966838147491217, 0.20804248750209808, -0.002053449861705303, -0.06485353410243988, -0.13533863425254822, 0.12488845735788345, -0.012266084551811218, 0.023325325921177864, 0.03461397811770439, -0.11675015836954117, 0.03842269256711006, 0.1555795818567276, 0.09165281057357788, 0.056511927396059036, 0.016430096700787544, 0.0459018275141716, 0.023923790082335472, -0.038945045322179794, 0.060476094484329224, 0.021769560873508453, 0.24706678092479706, -0.05599731579422951, 0.0923069566488266, -0.008848398923873901, -0.000041397881432203576, -0.013344280421733856, 0.10744009166955948, -0.04995875805616379, 0.023843249306082726, -0.08038640767335892, 0.09397761523723602, -0.0592302568256855, -0.26038727164268494, -0.0018662341171875596, -0.07335855811834335, -0.1455245018005371, -0.012752585113048553, 0.03537365794181824, -0.043327152729034424, 0.04456980526447296, 0.03014563024044037, -0.025892673060297966, 0.17896980047225952, 0.004381985403597355, -0.0743161290884018, -0.08500074595212936, 0.0741865411400795, -0.019490521401166916, 0.2809349000453949, -0.01194704882800579, 0.06268361210823059, 0.08800720423460007, -0.0168196689337492, -0.12588149309158325, 0.016064563766121864, 0.0872868075966835, -0.06144573166966438, 0.05454987287521362, 0.16826090216636658, -0.030763057991862297, 
0.150469109416008, 0.030071016401052475, -0.022866783663630486, 0.07477521896362305, 0.07156816124916077, 0.034652046859264374, -0.10028248280286789, 0.07579486072063446, -0.09429282695055008, 0.13477061688899994, 0.10142907500267029, -0.006923256907612085, -0.0074966540560126305, -0.06058593466877937, 0.06345026195049286, -0.032898496836423874, 0.15567626059055328, -0.017184162512421608, -0.17740577459335327, 0.048073090612888336, 0.031257037073373795, 0.0663614422082901, -0.245945543050766, -0.05661126598715782, 0.10527753084897995, -0.04938744753599167, 0.017643071711063385, 0.08867212384939194, 0.044952187687158585, 0.027141103520989418, -0.054736796766519547, -0.12824216485023499, 0.0165669247508049, 0.12573093175888062, -0.09173846989870071, -0.032986026257276535 ]
3f3a70ee720cbc44c6ccc78f108777a9d8cb49d0
## 💻 Dataset Usage Run the following command to load the data: ```python from datasets import load_dataset dataset = load_dataset("shuyuej/MATH-Consistency") dataset = dataset['train'] print(dataset) ```
shuyuej/MATH-Consistency
[ "license:apache-2.0", "region:us" ]
2024-01-25T17:27:16+00:00
{"license": "apache-2.0"}
2024-02-04T02:56:04+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
## Dataset Usage Run the following command to load the data:
[ "## Dataset Usage\nRun the following command to load the data:" ]
[ "TAGS\n#license-apache-2.0 #region-us \n", "## Dataset Usage\nRun the following command to load the data:" ]
[ 14, 14 ]
[ "passage: TAGS\n#license-apache-2.0 #region-us \n## Dataset Usage\nRun the following command to load the data:" ]
[ -0.06943472474813461, 0.17125317454338074, -0.004455668851733208, 0.006078519858419895, 0.06209815293550491, 0.0794142484664917, 0.12273544073104858, 0.10441745817661285, 0.16472923755645752, -0.05309644341468811, 0.11251383274793625, 0.11054177582263947, 0.033471349626779556, 0.1678016185760498, 0.005751472897827625, -0.06753864884376526, 0.09273435920476913, -0.08266307413578033, -0.09564749896526337, 0.002987775020301342, 0.0720694363117218, 0.014681399799883366, 0.032067254185676575, -0.06027469038963318, 0.0035244766622781754, 0.045622050762176514, 0.030450239777565002, 0.017367254942655563, 0.021313302218914032, -0.05707349628210068, 0.0049819000996649265, 0.039865512400865555, 0.08516258001327515, -0.2500132918357849, -0.006045536138117313, -0.0002771131694316864, -0.05404769256711006, 0.006790756247937679, 0.01125620398670435, 0.012256817892193794, -0.02638927474617958, -0.03271229937672615, -0.046034712344408035, 0.006980582140386105, -0.06659051030874252, -0.18468520045280457, -0.11096441745758057, -0.015979595482349396, 0.09476833045482635, 0.05407395958900452, 0.05911784619092941, 0.09674381464719772, -0.10562871396541595, 0.0069998325780034065, 0.04735151678323746, -0.20225292444229126, 0.06056796759366989, 0.19008448719978333, 0.024854708462953568, 0.09994632005691528, -0.017184199765324593, 0.00555717758834362, 0.05489666014909744, -0.025368815287947655, -0.05743981525301933, -0.04807623103260994, -0.14098821580410004, 0.1403028964996338, -0.030478350818157196, -0.13543632626533508, 0.4807993769645691, 0.0160335972905159, 0.025535792112350464, 0.07933206856250763, -0.05164159834384918, 0.007159324362874031, -0.014172931201756, 0.06304608285427094, 0.0878206193447113, 0.15403851866722107, 0.07094992697238922, -0.05324976518750191, -0.15516003966331482, -0.1004098653793335, -0.1072923019528389, -0.12933233380317688, -0.027578415349125862, 0.11227558553218842, -0.09117542952299118, -0.008576031774282455, -0.07383423298597336, -0.05124230682849884, 
-0.07150831818580627, -0.0349009707570076, -0.014803707599639893, 0.054280105978250504, -0.12328913807868958, 0.01343734934926033, 0.1898297369480133, 0.2024160623550415, 0.14730952680110931, 0.03760255128145218, -0.07059018313884735, 0.10122218728065491, 0.05980806425213814, -0.00011521577835083008, 0.03474026545882225, -0.024009980261325836, 0.08284662663936615, -0.10293971747159958, 0.056026309728622437, -0.05350653827190399, -0.12343086302280426, -0.008325835689902306, -0.10920204222202301, 0.09258559346199036, 0.08662278950214386, -0.059766121208667755, -0.05540382117033005, -0.019533850252628326, 0.15344667434692383, -0.11820454895496368, 0.02944810315966606, 0.0799579992890358, -0.04752987623214722, -0.06413846462965012, 0.09692273288965225, 0.023890655487775803, 0.03691817447543144, -0.08940103650093079, -0.04415231570601463, -0.02510206587612629, -0.03979949653148651, 0.022512080147862434, 0.09687069803476334, -0.026821965351700783, -0.004847998730838299, -0.04840760678052902, -0.33662930130958557, -0.025203842669725418, 0.1264933943748474, -0.0014894027262926102, 0.004050544463098049, 0.014096491038799286, 0.05765786021947861, 0.03711295127868652, -0.06287553906440735, 0.04942251741886139, -0.10872536897659302, 0.062457308173179626, -0.12237704545259476, 0.03884885460138321, -0.20335906744003296, 0.018008682876825333, -0.15130062401294708, 0.010354970581829548, 0.04521895945072174, 0.03755505755543709, -0.13220518827438354, 0.11726720631122589, -0.1269126832485199, 0.0832601934671402, -0.01227140985429287, -0.05426327884197235, -0.021754272282123566, 0.12822143733501434, -0.2770540714263916, 0.054783619940280914, 0.13322871923446655, -0.07297634333372116, -0.15364611148834229, -0.04685847461223602, 0.00303480657748878, 0.11989004909992218, 0.008060130290687084, 0.289919376373291, 0.04685509204864502, -0.10763256251811981, 0.10046853125095367, 0.19083523750305176, -0.09755373001098633, -0.26914215087890625, 0.15156538784503937, -0.14694306254386902, 
-0.039389681071043015, 0.011379745788872242, -0.09065338224172592, 0.1221199557185173, 0.04895826429128647, -0.12298046052455902, -0.02827529050409794, -0.11243222653865814, -0.03498372435569763, -0.03082111105322838, -0.020694775506854057, 0.027387043461203575, 0.10281307995319366, -0.05066999793052673, 0.1019170880317688, 0.022183017805218697, 0.043879665434360504, -0.021463967859745026, -0.039854392409324646, -0.01426796242594719, 0.019854173064231873, -0.04891686886548996, -0.0881013423204422, 0.01790449395775795, 0.000019584782421588898, 0.007547207176685333, 0.0636160671710968, 0.09072673320770264, -0.07686332613229752, 0.06503883749246597, 0.05832139402627945, 0.07860610634088516, 0.06687048822641373, -0.009041287936270237, -0.10138657689094543, 0.037013448774814606, -0.017413388937711716, 0.10686743259429932, -0.027942176908254623, 0.026651151478290558, -0.008600214496254921, -0.02766340970993042, -0.054202720522880554, -0.009914284572005272, 0.004475277848541737, -0.11735501885414124, 0.013870497234165668, -0.029156886041164398, 0.05293544754385948, 0.05143973231315613, -0.06803161650896072, 0.11172495782375336, -0.024572283029556274, -0.03626139834523201, 0.13345254957675934, 0.0016028918325901031, 0.1429334282875061, -0.06364671885967255, -0.0007171479519456625, -0.0694420337677002, 0.019968025386333466, -0.04349001124501228, -0.17439842224121094, 0.09127738326787949, -0.007492044009268284, -0.01094079203903675, 0.0009283549152314663, -0.03292721509933472, -0.07637333869934082, -0.021593043580651283, -0.0632651150226593, 0.23726968467235565, -0.0983211100101471, 0.12667620182037354, 0.27513250708580017, -0.01845426857471466, 0.06573908030986786, -0.09619554877281189, -0.111696258187294, -0.009605670347809792, 0.03918004035949707, -0.037590816617012024, 0.12409254908561707, -0.06440609693527222, 0.10389723628759384, 0.12646254897117615, 0.06336399912834167, 0.04404948279261589, -0.07028825581073761, -0.10325805842876434, 0.003777497448027134, 
-0.08992604911327362, -0.1399235874414444, -0.019188566133379936, -0.07419714331626892, -0.002209560014307499, -0.01993461325764656, -0.03615530952811241, 0.13361383974552155, -0.06480543315410614, -0.10067059099674225, 0.10902198404073715, -0.17247501015663147, -0.050900883972644806, -0.07066699862480164, -0.16260859370231628, -0.05678735673427582, 0.015098575502634048, 0.07322777807712555, -0.07244840264320374, -0.03358049318194389, 0.031222643330693245, -0.12971535325050354, 0.08762162923812866, -0.04727423936128616, 0.015509357675909996, 0.06234598159790039, 0.06380518525838852, -0.16239821910858154, -0.012001106515526772, 0.0277389008551836, 0.007383991032838821, 0.049305547028779984, -0.0931641012430191, 0.14027529954910278, 0.12152428925037384, 0.08316531032323837, 0.0664820522069931, 0.03380025178194046, 0.14797493815422058, 0.02113993465900421, -0.063491091132164, 0.09268038719892502, 0.003439154475927353, 0.0486578643321991, 0.1458265781402588, 0.03739837929606438, -0.08718066662549973, -0.00508253974840045, -0.020847419276833534, -0.12649521231651306, -0.275198757648468, -0.13460256159305573, -0.09800401329994202, 0.014640040695667267, 0.09602578729391098, 0.11885160952806473, 0.03895656019449234, 0.07796996086835861, 0.040156811475753784, 0.001516374759376049, -0.004696222487837076, -0.02094142884016037, 0.0462658666074276, -0.024004612118005753, -0.023276977241039276, -0.14627747237682343, 0.08675665408372879, 0.164139524102211, 0.14012938737869263, 0.23228979110717773, 0.16336089372634888, 0.18339547514915466, 0.10973236709833145, 0.15240877866744995, -0.08606286346912384, 0.20117712020874023, 0.06251294910907745, 0.012745719403028488, 0.0101119764149189, -0.04352162033319473, -0.07687318325042725, 0.11436016857624054, 0.06994528323411942, -0.22108674049377441, 0.07211336493492126, -0.09020695090293884, 0.06393399834632874, 0.08057168126106262, 0.04638546705245972, -0.15890313684940338, 0.12775404751300812, 0.0743337944149971, 0.16150787472724915, 
0.030837256461381912, 0.08391150832176208, -0.05210934579372406, -0.009967600926756859, 0.10325166583061218, 0.024574732407927513, 0.08759009838104248, 0.06661029160022736, -0.05537986755371094, -0.0408899188041687, -0.13385455310344696, 0.05312856286764145, 0.18570688366889954, -0.1302276849746704, 0.15439021587371826, -0.0196116641163826, -0.03921682760119438, -0.10281398147344589, -0.03940857946872711, 0.024013912305235863, 0.06232661008834839, 0.1250613033771515, 0.09913244843482971, -0.10713256895542145, 0.020219210535287857, -0.1753794550895691, 0.03362160176038742, -0.035579197108745575, 0.011656609363853931, -0.09651002287864685, -0.00869862362742424, 0.0037512686103582382, 0.04207126796245575, 0.1853131353855133, -0.11713586747646332, -0.1290581077337265, -0.022877542302012444, 0.1544644832611084, -0.006893562152981758, -0.04194994270801544, 0.08443966507911682, 0.05993147939443588, 0.09688814729452133, -0.013236263766884804, -0.011703058145940304, -0.06468440592288971, -0.09045802056789398, 0.11511781066656113, -0.0013406320940703154, -0.02647324651479721, -0.03680873662233353, -0.04382390156388283, -0.07784045487642288, -0.14049610495567322, 0.08429387211799622, -0.09622471034526825, 0.049733683466911316, -0.06492075324058533, 0.07039912045001984, 0.019671007990837097, 0.0020633479580283165, 0.04730592668056488, 0.04110068455338478, -0.03247552365064621, -0.14085859060287476, 0.06499198824167252, 0.008715592324733734, -0.0005575420800596476, 0.03277839720249176, -0.03713076561689377, -0.001521100290119648, 0.07177935540676117, -0.04137754440307617, 0.23985691368579865, 0.19110921025276184, -0.08990263938903809, 0.16447195410728455, 0.24483275413513184, -0.08617126941680908, -0.21052195131778717, -0.17956998944282532, -0.13160231709480286, -0.10855046659708023, 0.06284195184707642, -0.17665967345237732, 0.09981662780046463, 0.14512473344802856, -0.12647706270217896, 0.08117486536502838, -0.18537792563438416, -0.08591479063034058, 0.18508681654930115, 
-0.059300586581230164, 0.21302926540374756, -0.159997820854187, -0.049422502517700195, -0.11740309000015259, -0.07493529468774796, 0.19575566053390503, -0.1735633909702301, 0.031251147389411926, 0.04590725153684616, -0.0025283275172114372, -0.03604559972882271, -0.1015295684337616, 0.1855563372373581, 0.115689717233181, 0.029043490067124367, -0.06468375772237778, 0.021510954946279526, 0.23604612052440643, -0.06488166004419327, 0.12517660856246948, -0.10666045546531677, -0.0214394424110651, -0.10116925090551376, 0.07036390900611877, -0.04489133507013321, 0.06192108243703842, 0.008749796077609062, -0.06883176416158676, -0.043268706649541855, -0.06734978407621384, 0.060572586953639984, 0.013696033507585526, 0.12471915036439896, 0.08866573870182037, -0.14726679027080536, 0.1861436665058136, -0.06764497607946396, -0.022869233042001724, -0.07787051796913147, -0.0626029297709465, -0.09480064362287521, 0.007011027075350285, -0.23434391617774963, 0.051180921494960785, -0.0034816982224583626, -0.047603584825992584, 0.04357585310935974, 0.06350784003734589, -0.06255022436380386, -0.029021097347140312, 0.13090452551841736, -0.07183370739221573, 0.0406806506216526, 0.0912569910287857, 0.06966909021139145, -0.04271659627556801, -0.06523261219263077, 0.021988242864608765, 0.04822417348623276, 0.02260378934442997, -0.0029231877997517586, 0.08927913010120392, -0.15450848639011383, 0.0841890424489975, 0.10300107300281525, 0.018020235002040863, -0.1498057246208191, 0.18566276133060455, 0.01889757253229618, -0.0572521947324276, -0.002056330442428589, 0.09334561228752136, -0.09221111238002777, -0.10835270583629608, -0.007194914855062962, 0.1301790475845337, -0.07531335204839706, -0.12765634059906006, 0.04676225781440735, 0.0675758421421051, 0.014224324375391006, -0.13748052716255188, 0.08976376056671143, 0.06581946462392807, 0.04986824840307236, -0.0943504348397255, 0.059834208339452744, 0.0264125969260931, -0.08494354039430618, -0.03349066525697708, -0.0442795529961586, 
-0.15548647940158844, -0.0038881979417055845, 0.03696587681770325, -0.021055109798908234, -0.04704056680202484, -0.04689568281173706, 0.051509417593479156, -0.11134118586778641, 0.023202069103717804, -0.06423036754131317, -0.009744901210069656, 0.002879494335502386, -0.03788705915212631, -0.02142927423119545, -0.0006724707782268524, -0.12909698486328125, -0.04507853090763092, -0.04292786121368408, 0.05088891088962555, -0.18488961458206177, -0.08388516306877136, 0.09697677195072174, 0.032258354127407074, 0.09917040169239044, 0.16176655888557434, -0.03441382944583893, 0.06861016154289246, -0.09982761740684509, -0.12281396239995956, 0.03404206037521362, 0.06572362035512924, -0.0114216897636652, -0.012391975149512291, -0.05742845684289932, 0.1139708086848259, -0.059929780662059784, -0.016626618802547455, 0.019706591963768005, -0.11208698898553848, -0.004722362384200096, -0.03516071289777756, -0.09340212494134903, 0.0005916887894272804, -0.15191976726055145, 0.10992825031280518, 0.12998360395431519, 0.1232869029045105, 0.022709781304001808, 0.0018658379558473825, -0.03399617597460747, -0.014928711578249931, -0.03424418345093727, 0.008166313171386719, -0.13012219965457916, -0.02021569013595581, -0.05473481863737106, -0.07026353478431702, 0.3453385829925537, -0.0713987648487091, -0.08814428001642227, 0.036490559577941895, 0.16787809133529663, 0.028277646750211716, -0.016981463879346848, 0.29859083890914917, 0.06687676161527634, 0.0442349836230278, -0.017344459891319275, 0.056957636028528214, 0.03570486977696419, -0.09372188150882721, 0.03143572062253952, 0.04594331979751587, 0.16095522046089172, 0.019649144262075424, -0.018549226224422455, -0.19943395256996155, -0.05679013207554817, 0.02018466778099537, 0.032557982951402664, 0.03642534837126732, 0.008168441243469715, 0.10181158035993576, 0.07190997898578644, -0.08033566176891327, -0.021548662334680557, -0.0543053075671196, -0.02464166097342968, -0.1398187279701233, -0.11462698876857758, -0.007858951576054096, 
-0.16689884662628174, -0.018185485154390335, -0.01643109880387783, 0.02339163050055504, 0.21498200297355652, 0.00435562152415514, 0.02406521514058113, -0.03138691186904907, -0.15899211168289185, -0.012396685779094696, -0.15035462379455566, -0.08704906702041626, -0.03440994396805763, 0.009074914269149303, -0.04439608380198479, 0.047439828515052795, -0.07307280600070953, 0.013456115499138832, 0.0785612165927887, 0.1178554818034172, 0.13098010420799255, -0.11406239867210388, -0.05986625701189041, -0.0893315002322197, 0.044087864458560944, -0.023389099165797234, 0.13558006286621094, 0.09082266688346863, 0.07537633180618286, 0.12402705103158951, 0.08760827034711838, -0.0734214335680008, -0.21223437786102295, -0.06155582517385483, 0.0076141380704939365, 0.04086441546678543, 0.051206864416599274, -0.0628214031457901, -0.07044647634029388, -0.06000617519021034, 0.2021888792514801, 0.26144981384277344, -0.08439701795578003, -0.006433413363993168, -0.01901260018348694, 0.029973763972520828, 0.003361658426001668, 0.09535866975784302, 0.05770188570022583, 0.1381915956735611, -0.08491244912147522, 0.004756912589073181, -0.05520184338092804, -0.020621197298169136, -0.13648098707199097, 0.00957599189132452, -0.016380751505494118, -0.09397032856941223, -0.012108838185667992, 0.16491204500198364, -0.1144275814294815, 0.08604748547077179, -0.023677946999669075, -0.046795234084129333, -0.007800763472914696, -0.007448018528521061, 0.13672533631324768, 0.025979451835155487, -0.010713894851505756, -0.09481768310070038, 0.02109823003411293, 0.04681764915585518, -0.02929430827498436, -0.3183463215827942, -0.12281525135040283, 0.07394041121006012, 0.04519334435462952, 0.22965559363365173, 0.013662943616509438, 0.11978951841592789, 0.047954536974430084, 0.03315018117427826, -0.13952648639678955, 0.10076978802680969, 0.037388645112514496, -0.015429372899234295, -0.044188909232616425, -0.18003998696804047, -0.15136447548866272, -0.006769988685846329, 0.014348662458360195, 
-0.06916264444589615, -0.00039906473830342293, 0.16916608810424805, 0.04301277920603752, -0.07904322445392609, -0.0496673583984375, -0.10481718182563782, 0.08156544715166092, -0.053420573472976685, -0.046385567635297775, -0.021644698455929756, -0.062274184077978134, 0.06372947245836258, 0.04975148290395737, -0.22853875160217285, -0.08036629110574722, 0.04481687769293785, -0.03260484337806702, 0.12874895334243774, 0.05968894064426422, 0.02800161764025688, 0.044961459934711456, -0.06490800529718399, 0.0005886848084628582, -0.046331971883773804, 0.09132375568151474, 0.03482279181480408, -0.010073130019009113, 0.007286872714757919, -0.10494045913219452, 0.03212354704737663, 0.004542812705039978, -0.10290689766407013, -0.13175883889198303 ]
f3141b096e067eeef930550c6152741434ec95bf
# Dataset Card for Evaluation run of vanillaOVO/supermario_v2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [vanillaOVO/supermario_v2](https://huggingface.co/vanillaOVO/supermario_v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_vanillaOVO__supermario_v2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T17:31:48.742417](https://huggingface.co/datasets/open-llm-leaderboard/details_vanillaOVO__supermario_v2/blob/main/results_2024-01-25T17-31-48.742417.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6559999152632306, "acc_stderr": 0.032043161212488, "acc_norm": 0.6552568031435213, "acc_norm_stderr": 0.03271482699591929, "mc1": 0.565483476132191, "mc1_stderr": 0.01735273874925956, "mc2": 0.7122019171893207, "mc2_stderr": 0.014799117031048576 }, "harness|arc:challenge|25": { "acc": 0.7056313993174061, "acc_stderr": 0.01331852846053942, "acc_norm": 0.7295221843003413, "acc_norm_stderr": 0.012980954547659556 }, "harness|hellaswag|10": { "acc": 0.7126070503883688, "acc_stderr": 0.004516215206715354, "acc_norm": 0.8852818163712408, "acc_norm_stderr": 0.0031803024181887253 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6666666666666666, "acc_stderr": 0.04072314811876837, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.04072314811876837 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7105263157894737, "acc_stderr": 0.03690677986137283, "acc_norm": 0.7105263157894737, "acc_norm_stderr": 0.03690677986137283 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7132075471698113, "acc_stderr": 0.027834912527544067, "acc_norm": 0.7132075471698113, "acc_norm_stderr": 0.027834912527544067 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03476590104304134, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 
0.05009082659620333 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.653179190751445, "acc_stderr": 0.036291466701596636, "acc_norm": 0.653179190751445, "acc_norm_stderr": 0.036291466701596636 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4215686274509804, "acc_stderr": 0.04913595201274498, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.04913595201274498 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.04292346959909283, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5914893617021276, "acc_stderr": 0.032134180267015755, "acc_norm": 0.5914893617021276, "acc_norm_stderr": 0.032134180267015755 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4824561403508772, "acc_stderr": 0.04700708033551038, "acc_norm": 0.4824561403508772, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5517241379310345, "acc_stderr": 0.04144311810878152, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.04144311810878152 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42857142857142855, "acc_stderr": 0.02548718714785938, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.02548718714785938 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.47619047619047616, "acc_stderr": 0.04467062628403273, "acc_norm": 0.47619047619047616, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7806451612903226, "acc_stderr": 0.023540799358723295, "acc_norm": 0.7806451612903226, "acc_norm_stderr": 0.023540799358723295 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4876847290640394, "acc_stderr": 0.035169204442208966, "acc_norm": 0.4876847290640394, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.803030303030303, "acc_stderr": 0.028335609732463362, "acc_norm": 0.803030303030303, "acc_norm_stderr": 0.028335609732463362 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6820512820512821, "acc_stderr": 0.02361088430892786, "acc_norm": 0.6820512820512821, "acc_norm_stderr": 0.02361088430892786 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34444444444444444, "acc_stderr": 0.02897264888484427, "acc_norm": 0.34444444444444444, "acc_norm_stderr": 0.02897264888484427 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6722689075630253, "acc_stderr": 0.03048991141767323, "acc_norm": 0.6722689075630253, "acc_norm_stderr": 0.03048991141767323 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3576158940397351, "acc_stderr": 0.03913453431177258, "acc_norm": 0.3576158940397351, "acc_norm_stderr": 0.03913453431177258 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8458715596330275, "acc_stderr": 0.015480826865374303, "acc_norm": 0.8458715596330275, "acc_norm_stderr": 0.015480826865374303 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5185185185185185, "acc_stderr": 
0.034076320938540516, "acc_norm": 0.5185185185185185, "acc_norm_stderr": 0.034076320938540516 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8284313725490197, "acc_stderr": 0.026460569561240644, "acc_norm": 0.8284313725490197, "acc_norm_stderr": 0.026460569561240644 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.810126582278481, "acc_stderr": 0.02553010046023349, "acc_norm": 0.810126582278481, "acc_norm_stderr": 0.02553010046023349 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.031146796482972465, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098824, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098824 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7592592592592593, "acc_stderr": 0.04133119440243839, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.04133119440243839 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7668711656441718, "acc_stderr": 0.0332201579577674, "acc_norm": 0.7668711656441718, "acc_norm_stderr": 0.0332201579577674 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.45535714285714285, "acc_stderr": 0.047268355537191, "acc_norm": 0.45535714285714285, "acc_norm_stderr": 0.047268355537191 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.040580420156460344, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406974, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406974 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 
0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8326947637292464, "acc_stderr": 0.013347327202920332, "acc_norm": 0.8326947637292464, "acc_norm_stderr": 0.013347327202920332 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7427745664739884, "acc_stderr": 0.02353292543104429, "acc_norm": 0.7427745664739884, "acc_norm_stderr": 0.02353292543104429 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.42905027932960893, "acc_stderr": 0.016553287863116037, "acc_norm": 0.42905027932960893, "acc_norm_stderr": 0.016553287863116037 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7156862745098039, "acc_stderr": 0.025829163272757482, "acc_norm": 0.7156862745098039, "acc_norm_stderr": 0.025829163272757482 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7234726688102894, "acc_stderr": 0.025403832978179615, "acc_norm": 0.7234726688102894, "acc_norm_stderr": 0.025403832978179615 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.75, "acc_stderr": 0.02409347123262133, "acc_norm": 0.75, "acc_norm_stderr": 0.02409347123262133 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, "acc_stderr": 0.029820747191422473, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.029820747191422473 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46936114732724904, "acc_stderr": 0.012746237711716634, "acc_norm": 0.46936114732724904, "acc_norm_stderr": 0.012746237711716634 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6764705882352942, "acc_stderr": 0.02841820861940676, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.02841820861940676 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6781045751633987, "acc_stderr": 0.018901015322093092, "acc_norm": 0.6781045751633987, "acc_norm_stderr": 0.018901015322093092 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, 
"acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7387755102040816, "acc_stderr": 0.028123429335142783, "acc_norm": 0.7387755102040816, "acc_norm_stderr": 0.028123429335142783 }, "harness|hendrycksTest-sociology|5": { "acc": 0.845771144278607, "acc_stderr": 0.025538433368578337, "acc_norm": 0.845771144278607, "acc_norm_stderr": 0.025538433368578337 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.029170885500727665, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.029170885500727665 }, "harness|truthfulqa:mc|0": { "mc1": 0.565483476132191, "mc1_stderr": 0.01735273874925956, "mc2": 0.7122019171893207, "mc2_stderr": 0.014799117031048576 }, "harness|winogrande|5": { "acc": 0.8389897395422258, "acc_stderr": 0.010329712832785722 }, "harness|gsm8k|5": { "acc": 0.7134192570128886, "acc_stderr": 0.012454841668337697 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_vanillaOVO__supermario_v2
[ "region:us" ]
2024-01-25T17:34:11+00:00
{"pretty_name": "Evaluation run of vanillaOVO/supermario_v2", "dataset_summary": "Dataset automatically created during the evaluation run of model [vanillaOVO/supermario_v2](https://huggingface.co/vanillaOVO/supermario_v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_vanillaOVO__supermario_v2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T17:31:48.742417](https://huggingface.co/datasets/open-llm-leaderboard/details_vanillaOVO__supermario_v2/blob/main/results_2024-01-25T17-31-48.742417.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6559999152632306,\n \"acc_stderr\": 0.032043161212488,\n \"acc_norm\": 0.6552568031435213,\n \"acc_norm_stderr\": 0.03271482699591929,\n \"mc1\": 0.565483476132191,\n \"mc1_stderr\": 0.01735273874925956,\n \"mc2\": 0.7122019171893207,\n \"mc2_stderr\": 0.014799117031048576\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7056313993174061,\n \"acc_stderr\": 0.01331852846053942,\n \"acc_norm\": 0.7295221843003413,\n \"acc_norm_stderr\": 0.012980954547659556\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7126070503883688,\n \"acc_stderr\": 0.004516215206715354,\n \"acc_norm\": 0.8852818163712408,\n \"acc_norm_stderr\": 0.0031803024181887253\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.04072314811876837,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.04072314811876837\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7105263157894737,\n \"acc_stderr\": 0.03690677986137283,\n \"acc_norm\": 0.7105263157894737,\n \"acc_norm_stderr\": 0.03690677986137283\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7132075471698113,\n \"acc_stderr\": 0.027834912527544067,\n \"acc_norm\": 0.7132075471698113,\n \"acc_norm_stderr\": 0.027834912527544067\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n 
\"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.653179190751445,\n \"acc_stderr\": 0.036291466701596636,\n \"acc_norm\": 0.653179190751445,\n \"acc_norm_stderr\": 0.036291466701596636\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5914893617021276,\n \"acc_stderr\": 0.032134180267015755,\n \"acc_norm\": 0.5914893617021276,\n \"acc_norm_stderr\": 0.032134180267015755\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878152,\n \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878152\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.02548718714785938,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.02548718714785938\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.47619047619047616,\n \"acc_stderr\": 
0.04467062628403273,\n \"acc_norm\": 0.47619047619047616,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7806451612903226,\n \"acc_stderr\": 0.023540799358723295,\n \"acc_norm\": 0.7806451612903226,\n \"acc_norm_stderr\": 0.023540799358723295\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4876847290640394,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.4876847290640394,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.803030303030303,\n \"acc_stderr\": 0.028335609732463362,\n \"acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.028335609732463362\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6820512820512821,\n \"acc_stderr\": 0.02361088430892786,\n \"acc_norm\": 0.6820512820512821,\n \"acc_norm_stderr\": 0.02361088430892786\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34444444444444444,\n \"acc_stderr\": 0.02897264888484427,\n \"acc_norm\": 0.34444444444444444,\n \"acc_norm_stderr\": 0.02897264888484427\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6722689075630253,\n \"acc_stderr\": 0.03048991141767323,\n \"acc_norm\": 0.6722689075630253,\n \"acc_norm_stderr\": 0.03048991141767323\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8458715596330275,\n \"acc_stderr\": 0.015480826865374303,\n \"acc_norm\": 0.8458715596330275,\n \"acc_norm_stderr\": 0.015480826865374303\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5185185185185185,\n \"acc_stderr\": 0.034076320938540516,\n \"acc_norm\": 0.5185185185185185,\n \"acc_norm_stderr\": 0.034076320938540516\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8284313725490197,\n \"acc_stderr\": 0.026460569561240644,\n \"acc_norm\": 0.8284313725490197,\n \"acc_norm_stderr\": 0.026460569561240644\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.810126582278481,\n \"acc_stderr\": 0.02553010046023349,\n \"acc_norm\": 0.810126582278481,\n \"acc_norm_stderr\": 0.02553010046023349\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.04133119440243839,\n 
\"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.04133119440243839\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7668711656441718,\n \"acc_stderr\": 0.0332201579577674,\n \"acc_norm\": 0.7668711656441718,\n \"acc_norm_stderr\": 0.0332201579577674\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n \"acc_stderr\": 0.047268355537191,\n \"acc_norm\": 0.45535714285714285,\n \"acc_norm_stderr\": 0.047268355537191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406974,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406974\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8326947637292464,\n \"acc_stderr\": 0.013347327202920332,\n \"acc_norm\": 0.8326947637292464,\n \"acc_norm_stderr\": 0.013347327202920332\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7427745664739884,\n \"acc_stderr\": 0.02353292543104429,\n \"acc_norm\": 0.7427745664739884,\n \"acc_norm_stderr\": 0.02353292543104429\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.42905027932960893,\n \"acc_stderr\": 0.016553287863116037,\n \"acc_norm\": 0.42905027932960893,\n \"acc_norm_stderr\": 0.016553287863116037\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7156862745098039,\n \"acc_stderr\": 0.025829163272757482,\n \"acc_norm\": 0.7156862745098039,\n \"acc_norm_stderr\": 0.025829163272757482\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7234726688102894,\n \"acc_stderr\": 0.025403832978179615,\n 
\"acc_norm\": 0.7234726688102894,\n \"acc_norm_stderr\": 0.025403832978179615\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.02409347123262133,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.02409347123262133\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.029820747191422473,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.029820747191422473\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46936114732724904,\n \"acc_stderr\": 0.012746237711716634,\n \"acc_norm\": 0.46936114732724904,\n \"acc_norm_stderr\": 0.012746237711716634\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.02841820861940676,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.02841820861940676\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6781045751633987,\n \"acc_stderr\": 0.018901015322093092,\n \"acc_norm\": 0.6781045751633987,\n \"acc_norm_stderr\": 0.018901015322093092\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.028123429335142783,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.028123429335142783\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n \"acc_stderr\": 0.025538433368578337,\n \"acc_norm\": 0.845771144278607,\n \"acc_norm_stderr\": 0.025538433368578337\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 0.03874371556587953,\n 
\"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.565483476132191,\n \"mc1_stderr\": 0.01735273874925956,\n \"mc2\": 0.7122019171893207,\n \"mc2_stderr\": 0.014799117031048576\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8389897395422258,\n \"acc_stderr\": 0.010329712832785722\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7134192570128886,\n \"acc_stderr\": 0.012454841668337697\n }\n}\n```", "repo_url": "https://huggingface.co/vanillaOVO/supermario_v2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|arc:challenge|25_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|gsm8k|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hellaswag|10_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T17-31-48.742417.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T17-31-48.742417.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T17-31-48.742417.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T17-31-48.742417.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T17-31-48.742417.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T17-31-48.742417.parquet", 
"**/details_harness|hendrycksTest-security_studies|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T17-31-48.742417.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", 
"data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T17-31-48.742417.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", 
"path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["**/details_harness|winogrande|5_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T17-31-48.742417.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T17_31_48.742417", "path": ["results_2024-01-25T17-31-48.742417.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T17-31-48.742417.parquet"]}]}]}
2024-01-25T17:34:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of vanillaOVO/supermario_v2 Dataset automatically created during the evaluation run of model vanillaOVO/supermario_v2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T17:31:48.742417 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of vanillaOVO/supermario_v2\n\n\n\nDataset automatically created during the evaluation run of model vanillaOVO/supermario_v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T17:31:48.742417(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of vanillaOVO/supermario_v2\n\n\n\nDataset automatically created during the evaluation run of model vanillaOVO/supermario_v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T17:31:48.742417(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 181, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of vanillaOVO/supermario_v2\n\n\n\nDataset automatically created during the evaluation run of model vanillaOVO/supermario_v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T17:31:48.742417(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.040980130434036255, 0.19903181493282318, -0.005934131797403097, 0.0455588772892952, 0.08441685140132904, -0.019328845664858818, 0.040418606251478195, 0.10361345112323761, 0.022765567526221275, 0.18862125277519226, -0.021932940930128098, 0.1004079282283783, 0.08199366182088852, 0.11040502786636353, 0.02862873487174511, -0.14563021063804626, 0.032721299678087234, -0.10364383459091187, 0.10777134448289871, 0.06103971600532532, 0.057195138186216354, -0.08972872048616409, 0.06798743456602097, -0.03130673989653587, 0.04212057590484619, -0.0014847962884232402, -0.06740724295377731, -0.02771926112473011, 0.10086085647344589, 0.10237011313438416, 0.03373629227280617, -0.0097487922757864, 0.04098191112279892, -0.2570497393608093, 0.017688509076833725, 0.10503596812486649, -0.009934009052813053, 0.04243519902229309, 0.15944188833236694, -0.07639729976654053, 0.07953060418367386, -0.028597306460142136, 0.06805216521024704, 0.056574705988168716, -0.1083923727273941, -0.14054855704307556, -0.15659771859645844, -0.0027717233169823885, 0.0667061060667038, 0.04117321968078613, -0.02262047864496708, 0.1376792937517166, -0.05664597451686859, 0.04824065789580345, 0.1394631266593933, -0.1442435383796692, -0.023638276383280754, 0.04767202213406563, 0.020895471796393394, 0.09451345354318619, -0.06947579234838486, -0.02330273762345314, 0.029374783858656883, 0.054043177515268326, -0.002246422227472067, 0.013073205016553402, 0.004454352427273989, 0.012191098183393478, -0.1515522599220276, -0.1226256787776947, 0.10828948765993118, 0.0078063830733299255, -0.05139508470892906, -0.16189132630825043, -0.03644637390971184, 0.01599465124309063, -0.002847060328349471, 0.015759795904159546, 0.00517371017485857, -0.01155788917094469, 0.0930427834391594, -0.008370563387870789, -0.08570754528045654, -0.03626739978790283, -0.01401976402848959, 0.05148644745349884, 0.029310734942555428, 0.004729931708425283, 0.0132399071007967, 0.1178036481142044, 0.006675480864942074, -0.054474711418151855, 
-0.07173504680395126, -0.05310950428247452, -0.10707386583089828, -0.034682419151067734, 0.013288639485836029, -0.08459453284740448, 0.04551314562559128, 0.21539819240570068, -0.009705601260066032, 0.023890027776360512, -0.12007619440555573, 0.018538661301136017, 0.12649521231651306, 0.06561226397752762, -0.09255529195070267, -0.062048349529504776, -0.03997504338622093, 0.02172520011663437, 0.027649352326989174, -0.01802445389330387, 0.01208849810063839, 0.07546083629131317, 0.003363201627507806, 0.12862728536128998, 0.12080758064985275, 0.018555788323283195, -0.062022674828767776, -0.02075936645269394, 0.20865754783153534, -0.15302370488643646, -0.001117956475354731, 0.02805325761437416, -0.0414581373333931, -0.08547258377075195, 0.04834512248635292, -0.011489235796034336, -0.07045413553714752, 0.12680242955684662, -0.04159901663661003, -0.07027512788772583, -0.09020459651947021, -0.08205799013376236, 0.034923046827316284, -0.017794199287891388, -0.06708043068647385, -0.06683819741010666, -0.10958093404769897, -0.08955347537994385, 0.02380462735891342, -0.07570644468069077, -0.007752974517643452, 0.010508216917514801, 0.016947727650403976, -0.014945005066692829, -0.013621443882584572, 0.11349937319755554, -0.07055086642503738, 0.028213756158947945, -0.04455210641026497, 0.03203238174319267, 0.1057654544711113, 0.029197368770837784, -0.10699814558029175, 0.07983604818582535, -0.10040926933288574, 0.09572789072990417, -0.11165940016508102, -0.01885840855538845, -0.12139984220266342, 0.0024664844386279583, -0.02522330917418003, 0.03686188533902168, -0.025505920872092247, 0.08424589782953262, -0.21239139139652252, -0.0049184588715434074, 0.18850216269493103, -0.11720913648605347, -0.0632462203502655, 0.09911863505840302, -0.03217391297221184, 0.03638030216097832, 0.04186611995100975, 0.08842169493436813, 0.09409169107675552, -0.07031192630529404, -0.09358499944210052, -0.055258627980947495, -0.03153179958462715, 0.15102702379226685, 0.06852184981107712, 
-0.09460436552762985, 0.10237845778465271, 0.0321008674800396, 0.007395423948764801, -0.06854328513145447, -0.011923326179385185, -0.057281967252492905, -0.020143887028098106, -0.059333715587854385, -0.07503465563058853, -0.01871858909726143, -0.07004570960998535, -0.013975601643323898, -0.06381090730428696, -0.006029529497027397, 0.10063041001558304, -0.02405453473329544, 0.027834827080368996, -0.08294394612312317, 0.0576154999434948, 0.006762993056327105, 0.013912905938923359, -0.2128659188747406, -0.05996684357523918, 0.040962815284729004, -0.18748144805431366, 0.046172332018613815, 0.015523085370659828, 0.013855582103133202, 0.06638333946466446, 0.0007176484214141965, 0.02143341489136219, 0.033074751496315, -0.013883489184081554, -0.005617819260805845, -0.1565685123205185, -0.04986086115241051, -0.08254678547382355, 0.04752412438392639, -0.10714750736951828, -0.0172959566116333, 0.061842553317546844, 0.14930219948291779, 0.02077065221965313, -0.07491135597229004, 0.04187046363949776, 0.016759878024458885, -0.04638255760073662, -0.056216608732938766, -0.0011675767600536346, -0.026174070313572884, 0.04674140363931656, 0.04994019493460655, -0.16412360966205597, -0.1063365787267685, 0.07423916459083557, 0.12573498487472534, -0.052331890910863876, -0.05838821455836296, -0.06794512271881104, -0.04934035614132881, -0.10286011546850204, -0.0630243718624115, 0.07221188396215439, 0.0931912288069725, 0.05647125095129013, -0.07258478552103043, -0.04732638597488403, 0.007015113718807697, 0.05015356466174126, -0.06495047360658646, 0.10611565411090851, 0.0994681864976883, -0.08167262375354767, 0.10336726158857346, -0.050371404737234116, 0.10019195079803467, 0.09674970805644989, 0.01711961254477501, -0.10878447443246841, 0.006387750152498484, 0.05658653378486633, 0.05289437994360924, 0.06066209077835083, -0.01651015318930149, 0.038562119007110596, 0.0838506817817688, -0.003842700971290469, 0.03791041672229767, -0.07063305377960205, 0.031367626041173935, 0.027549760416150093, 
-0.0005477875238284469, 0.022003550082445145, 0.006274946965277195, 0.022502779960632324, 0.09327587485313416, 0.01810346730053425, 0.08553571999073029, -0.04436061903834343, -0.06105603277683258, -0.09824704378843307, 0.13448047637939453, -0.08726602792739868, -0.2287590652704239, -0.16956211626529694, -0.03347102925181389, -0.0356709323823452, -0.004784960299730301, 0.05770439654588699, -0.0018406938761472702, -0.11249957233667374, -0.13072432577610016, 0.0450848825275898, 0.04217451810836792, -0.12445953488349915, -0.04729200899600983, 0.045482877641916275, -0.01771480217576027, -0.1651758998632431, 0.03657960146665573, 0.04516129940748215, -0.07435458153486252, 0.007262897677719593, 0.08033692091703415, 0.10597248375415802, 0.09729522466659546, 0.0891759991645813, -0.022754952311515808, -0.014950718730688095, 0.15900690853595734, -0.10489162802696228, 0.03584105521440506, 0.1058272123336792, -0.0371234193444252, 0.08047984540462494, 0.1403474062681198, 0.012080186977982521, -0.07801521569490433, 0.05236496031284332, 0.09886079281568527, -0.06052339822053909, -0.2511560320854187, -0.10817057639360428, -0.02858622744679451, 0.04679626226425171, 0.10632876306772232, 0.0701000913977623, -0.0016908721299842, 0.00956061389297247, -0.12195136398077011, -0.036927130073308945, -0.04108843579888344, 0.06217646598815918, 0.0294506773352623, -0.008985285647213459, 0.0520394891500473, -0.04774690791964531, 0.020905237644910812, 0.1316424012184143, 0.024416225031018257, 0.15991102159023285, -0.031706880778074265, 0.185417041182518, 0.08705003559589386, 0.07025816291570663, -0.033405181020498276, 0.05522313714027405, -0.01825946755707264, 0.06863582879304886, -0.022670255973935127, -0.10170457512140274, -0.031107906252145767, 0.09623127430677414, 0.044221289455890656, -0.060815468430519104, 0.04597046598792076, -0.07988341152667999, 0.052003294229507446, 0.2482767254114151, -0.02129625342786312, -0.11062446981668472, -0.04338635504245758, 0.06553354114294052, 
-0.04786114767193794, -0.08767332136631012, 0.002913267817348242, 0.09933259338140488, -0.1496422439813614, 0.004703711252659559, -0.036251265555620193, 0.08078978210687637, -0.11480078101158142, -0.026716552674770355, -0.03230198100209236, 0.041588470339775085, -0.015849819406867027, 0.10275092720985413, -0.13040900230407715, 0.08380087465047836, -0.013325253501534462, 0.01310153678059578, -0.0782511755824089, 0.06837872415781021, -0.01895046792924404, -0.03780735284090042, 0.1366121768951416, -0.006215611007064581, -0.10141000151634216, -0.04820798709988594, -0.1126590371131897, -0.01703418605029583, 0.051583416759967804, -0.11290252208709717, 0.10575294494628906, 0.017491739243268967, -0.0291488915681839, -0.04060416668653488, -0.01737435907125473, -0.09900261461734772, -0.22592374682426453, 0.093955859541893, -0.1498515009880066, 0.04200776293873787, -0.06019134819507599, -0.04922989010810852, -0.06437092274427414, 0.1163516417145729, -0.12195446342229843, -0.053604088723659515, -0.10975197702646255, -0.029022403061389923, 0.1595664918422699, -0.053790219128131866, 0.06331111490726471, -0.039245717227458954, 0.17322950065135956, -0.035065021365880966, -0.050289228558540344, 0.007532570976763964, -0.088021419942379, -0.19484159350395203, -0.053749553859233856, 0.11007185280323029, 0.06550134718418121, 0.0163425300270319, -0.017193596810102463, 0.03674657270312309, 0.020421823486685753, -0.09869395196437836, 0.03770819678902626, 0.12948788702487946, 0.12779344618320465, 0.05270049348473549, -0.024595456197857857, -0.10964267700910568, -0.09978162497282028, -0.11029967665672302, 0.05618448182940483, 0.18100367486476898, -0.06393714249134064, 0.1590956598520279, 0.13519911468029022, -0.10419189929962158, -0.20328426361083984, -0.07717861980199814, -0.0007497787592001259, -0.02211903966963291, 0.1225358247756958, -0.19815847277641296, 0.049581870436668396, 0.0789179801940918, -0.03115864284336567, 0.10763819515705109, -0.2878870964050293, -0.14170557260513306, 
0.037551648914813995, 0.05232252553105354, -0.1949661672115326, -0.16820098459720612, -0.09622999280691147, -0.027172943577170372, -0.12288706004619598, 0.1268516182899475, -0.009032170288264751, 0.030158381909132004, -0.019785696640610695, 0.05683695152401924, 0.04500696808099747, -0.06860221922397614, 0.12934450805187225, -0.03207540884613991, 0.028608182445168495, -0.09666421264410019, -0.022259121760725975, -0.022572187706828117, -0.03799249976873398, 0.0661088079214096, 0.012703284621238708, 0.04630570486187935, -0.08131235837936401, -0.03107568994164467, -0.05197145789861679, 0.03758270666003227, -0.06423906981945038, -0.05736365169286728, -0.07201346755027771, 0.08422346413135529, 0.0915992334485054, -0.005051905754953623, 0.04886377602815628, -0.051412541419267654, 0.03208601474761963, 0.23568283021450043, 0.0845290794968605, 0.0497274287045002, -0.08804791420698166, -0.04172968491911888, -0.0136575223878026, 0.0021266858093440533, -0.0962950810790062, 0.050552625209093094, 0.08266878128051758, 0.03625945374369621, 0.09255232661962509, -0.01790240779519081, -0.1813916563987732, -0.000427464721724391, 0.0717129185795784, -0.09839699417352676, -0.2077929526567459, 0.04570891708135605, 0.11124752461910248, -0.11705417931079865, -0.08521920442581177, 0.08389610797166824, 0.019676465541124344, -0.03250337764620781, -0.0036249542608857155, 0.08092600107192993, 0.042852725833654404, 0.08864320814609528, 0.0005248318775556982, 0.042257532477378845, -0.06618160754442215, 0.0982535108923912, 0.1475435346364975, -0.1035580262541771, 0.0020980529952794313, 0.05650988593697548, -0.04561780393123627, -0.0664840117096901, -0.0021644122898578644, 0.032588835805654526, 0.017575843259692192, -0.034840475767850876, 0.003353205043822527, -0.049689941108226776, 0.07174351811408997, 0.15457333624362946, -0.01111795287579298, 0.053630098700523376, 0.013800472021102905, -0.0018682224908843637, -0.05426296219229698, 0.10150386393070221, 0.027481816709041595, 0.0433981828391552, 
-0.029508737847208977, 0.04365301504731178, 0.014778109267354012, -0.028735633939504623, 0.01692146435379982, -0.05270840972661972, -0.06898367404937744, 0.005813675932586193, -0.1883508414030075, 0.05465637519955635, -0.07569001615047455, -0.004358423408120871, -0.001444381196051836, -0.0064407652243971825, 0.00001836541559896432, 0.006695276126265526, -0.06934109330177307, -0.044898927211761475, -0.04890584200620651, 0.13076527416706085, -0.19275104999542236, 0.004856408573687077, 0.08468940854072571, -0.07136662304401398, 0.06035656854510307, -0.006609912030398846, -0.012020654045045376, 0.023412637412548065, -0.09310667961835861, -0.0019290173659101129, -0.03349117562174797, 0.06800557672977448, 0.014933085069060326, -0.1147676408290863, -0.010462973266839981, -0.007916547358036041, -0.08726866543292999, -0.006630753166973591, 0.03294210508465767, -0.14612355828285217, 0.08595027029514313, 0.09400727599859238, -0.046131957322359085, -0.03847058117389679, 0.036615513265132904, 0.03641119599342346, 0.006185981910675764, 0.09107682853937149, -0.020364001393318176, 0.036337874829769135, -0.1610560417175293, -0.036012738943099976, 0.0123906834051013, 0.006298923399299383, 0.03804029896855354, 0.006668363232165575, 0.025094248354434967, 0.0003334863285999745, 0.23804917931556702, 0.0033149165101349354, 0.021669598296284676, 0.026424942538142204, -0.02449706755578518, -0.03032245859503746, 0.04282417148351669, 0.00209734751842916, 0.0032397075556218624, 0.026861602440476418, 0.0007701481226831675, -0.04376424849033356, -0.05752922222018242, -0.011004442349076271, 0.08968443423509598, 0.12598368525505066, 0.17768025398254395, -0.03290683031082153, 0.0570237971842289, -0.16141623258590698, -0.04091881215572357, 0.0020107959862798452, -0.05335954949259758, 0.05500195547938347, -0.07286760956048965, 0.06016014888882637, 0.08536337316036224, -0.11229642480611801, 0.14883172512054443, -0.051181476563215256, -0.01668369583785534, -0.04846060276031494, -0.16502054035663605, 
-0.03735429048538208, 0.01827857829630375, 0.0022899925243109465, -0.09093476086854935, 0.12654545903205872, 0.11497621238231659, 0.0030075989197939634, -0.0014399226056411862, 0.06255926936864853, -0.08853204548358917, -0.04602070525288582, -0.029389001429080963, 0.00838522333651781, 0.019358718767762184, 0.011433654464781284, 0.054387014359235764, 0.006487702950835228, 0.048537109047174454, 0.06530921161174774, 0.10390862822532654, 0.03825461491942406, 0.02648325450718403, -0.031876128166913986, -0.047598037868738174, 0.004424404818564653, -0.03325256332755089, -0.06777964532375336, 0.20236389338970184, 0.0612640418112278, 0.021218182519078255, 0.0225434061139822, 0.21580973267555237, -0.017846910282969475, -0.05444350838661194, -0.1246228888630867, 0.14688275754451752, 0.00661061517894268, 0.03622955083847046, 0.036197155714035034, -0.1147540956735611, -0.002169343875721097, 0.1547408550977707, 0.1094217523932457, 0.022478820756077766, 0.011038033291697502, 0.04405940696597099, 0.022143205627799034, -0.025032754987478256, 0.055058564990758896, 0.0379582904279232, 0.22499053180217743, -0.0534818060696125, 0.0916227325797081, -0.014258782379329205, 0.003806092543527484, -0.04113144800066948, 0.11260561645030975, -0.05022848770022392, 0.0209500715136528, -0.06299251317977905, 0.07688000798225403, -0.06789954751729965, -0.24789558351039886, -0.003590330248698592, -0.069090835750103, -0.1326385885477066, -0.01048864796757698, 0.02455969527363777, -0.01859886944293976, 0.046385809779167175, 0.03799667954444885, -0.027643384411931038, 0.19087845087051392, 0.009191717021167278, -0.060809243470430374, -0.08655692636966705, 0.05663599073886871, -0.08870258927345276, 0.2842959761619568, 0.0012537384172901511, 0.041500236839056015, 0.07934119552373886, -0.018094826489686966, -0.12295585125684738, 0.0351959727704525, 0.08353020250797272, -0.05964553356170654, 0.046794433146715164, 0.1561935395002365, -0.023258160799741745, 0.14981412887573242, 0.024210115894675255, 
-0.011217871680855751, 0.07514333724975586, 0.06756481528282166, 0.034966807812452316, -0.07697886973619461, 0.07587262988090515, -0.09985201060771942, 0.12203319370746613, 0.11234799027442932, -0.014077394269406796, 0.01070841308683157, -0.05536764860153198, 0.06296124309301376, -0.05072730779647827, 0.13051144778728485, -0.01966637745499611, -0.14442600309848785, 0.04468132182955742, 0.005582737736403942, 0.0687272697687149, -0.2255195528268814, -0.057375092059373856, 0.09128037840127945, -0.04938775300979614, -0.008370601572096348, 0.07853373885154724, 0.03897712752223015, 0.021528879180550575, -0.05226527154445648, -0.1393492966890335, 0.023422149941325188, 0.11476585268974304, -0.060948360711336136, -0.03469223156571388 ]
a68b16ab48f8cd013e546291831b47d5c01ea87b
# Dataset Card for Evaluation run of AA051612/A0125 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [AA051612/A0125](https://huggingface.co/AA051612/A0125) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_AA051612__A0125", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T17:35:27.963132](https://huggingface.co/datasets/open-llm-leaderboard/details_AA051612__A0125/blob/main/results_2024-01-25T17-35-27.963132.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.853843249835467, "acc_stderr": 0.023020303121600947, "acc_norm": 0.8632142115707717, "acc_norm_stderr": 0.023353666335398204, "mc1": 0.42472460220318237, "mc1_stderr": 0.01730400095716748, "mc2": 0.6026945746890666, "mc2_stderr": 0.015339327539715458 }, "harness|arc:challenge|25": { "acc": 0.6467576791808873, "acc_stderr": 0.013967822714840055, "acc_norm": 0.697098976109215, "acc_norm_stderr": 0.013428241573185347 }, "harness|hellaswag|10": { "acc": 0.6534554869547898, "acc_stderr": 0.004748965717214275, "acc_norm": 0.8500298745269866, "acc_norm_stderr": 0.0035631244274585 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.8666666666666667, "acc_stderr": 0.029365879728106857, "acc_norm": 0.8666666666666667, "acc_norm_stderr": 0.029365879728106857 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.9276315789473685, "acc_stderr": 0.021085011261884105, "acc_norm": 0.9276315789473685, "acc_norm_stderr": 0.021085011261884105 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.83, "acc_stderr": 0.03775251680686371, "acc_norm": 0.83, "acc_norm_stderr": 0.03775251680686371 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.8981132075471698, "acc_stderr": 0.01861754975827669, "acc_norm": 0.8981132075471698, "acc_norm_stderr": 0.01861754975827669 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.9583333333333334, "acc_stderr": 0.016710315802959976, "acc_norm": 0.9583333333333334, "acc_norm_stderr": 0.016710315802959976 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.84, "acc_stderr": 0.036845294917747094, "acc_norm": 0.84, "acc_norm_stderr": 
0.036845294917747094 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.8728323699421965, "acc_stderr": 0.025403262004794074, "acc_norm": 0.8728323699421965, "acc_norm_stderr": 0.025403262004794074 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.7352941176470589, "acc_stderr": 0.043898699568087785, "acc_norm": 0.7352941176470589, "acc_norm_stderr": 0.043898699568087785 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.86, "acc_stderr": 0.03487350880197771, "acc_norm": 0.86, "acc_norm_stderr": 0.03487350880197771 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.8808510638297873, "acc_stderr": 0.021178168405396817, "acc_norm": 0.8808510638297873, "acc_norm_stderr": 0.021178168405396817 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.7543859649122807, "acc_stderr": 0.0404933929774814, "acc_norm": 0.7543859649122807, "acc_norm_stderr": 0.0404933929774814 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.9172413793103448, "acc_stderr": 0.022959752132687583, "acc_norm": 0.9172413793103448, "acc_norm_stderr": 0.022959752132687583 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.8227513227513228, "acc_stderr": 0.019667770001273677, "acc_norm": 0.8227513227513228, "acc_norm_stderr": 0.019667770001273677 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.6031746031746031, "acc_stderr": 0.043758884927270585, "acc_norm": 0.6031746031746031, "acc_norm_stderr": 0.043758884927270585 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.67, "acc_stderr": 0.047258156262526094, "acc_norm": 0.67, "acc_norm_stderr": 0.047258156262526094 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.9419354838709677, "acc_stderr": 0.01330413811280927, "acc_norm": 0.9419354838709677, "acc_norm_stderr": 0.01330413811280927 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.8078817733990148, "acc_stderr": 0.02771931570961478, "acc_norm": 0.8078817733990148, "acc_norm_stderr": 0.02771931570961478 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.87, "acc_stderr": 0.03379976689896309, "acc_norm": 0.87, "acc_norm_stderr": 0.03379976689896309 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.9575757575757575, "acc_stderr": 0.01573880284887258, "acc_norm": 0.9575757575757575, "acc_norm_stderr": 0.01573880284887258 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.9747474747474747, "acc_stderr": 0.01117803212271851, "acc_norm": 0.9747474747474747, "acc_norm_stderr": 0.01117803212271851 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9792746113989638, "acc_stderr": 0.010281417011909029, "acc_norm": 0.9792746113989638, "acc_norm_stderr": 0.010281417011909029 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.8923076923076924, "acc_stderr": 0.015717188416273085, "acc_norm": 0.8923076923076924, "acc_norm_stderr": 0.015717188416273085 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.7185185185185186, "acc_stderr": 0.027420019350945277, "acc_norm": 0.7185185185185186, "acc_norm_stderr": 0.027420019350945277 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.9411764705882353, "acc_stderr": 0.015283995352038414, "acc_norm": 0.9411764705882353, "acc_norm_stderr": 0.015283995352038414 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.6887417218543046, "acc_stderr": 0.03780445850526733, "acc_norm": 0.6887417218543046, "acc_norm_stderr": 0.03780445850526733 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9577981651376147, "acc_stderr": 0.008619919645779343, "acc_norm": 0.9577981651376147, "acc_norm_stderr": 0.008619919645779343 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.8240740740740741, "acc_stderr": 
0.025967420958258533, "acc_norm": 0.8240740740740741, "acc_norm_stderr": 0.025967420958258533 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9754901960784313, "acc_stderr": 0.010852588947505647, "acc_norm": 0.9754901960784313, "acc_norm_stderr": 0.010852588947505647 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.9662447257383966, "acc_stderr": 0.011755967781486706, "acc_norm": 0.9662447257383966, "acc_norm_stderr": 0.011755967781486706 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.8923766816143498, "acc_stderr": 0.020799400082880008, "acc_norm": 0.8923766816143498, "acc_norm_stderr": 0.020799400082880008 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.9312977099236641, "acc_stderr": 0.022184936922745042, "acc_norm": 0.9312977099236641, "acc_norm_stderr": 0.022184936922745042 }, "harness|hendrycksTest-international_law|5": { "acc": 0.9504132231404959, "acc_stderr": 0.01981748563352363, "acc_norm": 0.9504132231404959, "acc_norm_stderr": 0.01981748563352363 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.9351851851851852, "acc_stderr": 0.023800937426629205, "acc_norm": 0.9351851851851852, "acc_norm_stderr": 0.023800937426629205 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.9447852760736196, "acc_stderr": 0.017944712448654625, "acc_norm": 0.9447852760736196, "acc_norm_stderr": 0.017944712448654625 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.7678571428571429, "acc_stderr": 0.04007341809755806, "acc_norm": 0.7678571428571429, "acc_norm_stderr": 0.04007341809755806 }, "harness|hendrycksTest-management|5": { "acc": 0.9223300970873787, "acc_stderr": 0.026501440784762752, "acc_norm": 0.9223300970873787, "acc_norm_stderr": 0.026501440784762752 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9743589743589743, "acc_stderr": 0.010354979197709018, "acc_norm": 0.9743589743589743, "acc_norm_stderr": 0.010354979197709018 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.91, 
"acc_stderr": 0.028762349126466143, "acc_norm": 0.91, "acc_norm_stderr": 0.028762349126466143 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.9540229885057471, "acc_stderr": 0.00748938941749882, "acc_norm": 0.9540229885057471, "acc_norm_stderr": 0.00748938941749882 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.8641618497109826, "acc_stderr": 0.01844585866997476, "acc_norm": 0.8641618497109826, "acc_norm_stderr": 0.01844585866997476 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.8927374301675978, "acc_stderr": 0.010349448587660392, "acc_norm": 0.8927374301675978, "acc_norm_stderr": 0.010349448587660392 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.9215686274509803, "acc_stderr": 0.015394260411062108, "acc_norm": 0.9215686274509803, "acc_norm_stderr": 0.015394260411062108 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.9035369774919614, "acc_stderr": 0.016767663560541792, "acc_norm": 0.9035369774919614, "acc_norm_stderr": 0.016767663560541792 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.9290123456790124, "acc_stderr": 0.014288969013346262, "acc_norm": 0.9290123456790124, "acc_norm_stderr": 0.014288969013346262 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.8014184397163121, "acc_stderr": 0.02379830163794211, "acc_norm": 0.8014184397163121, "acc_norm_stderr": 0.02379830163794211 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.8526727509778357, "acc_stderr": 0.009052356615022185, "acc_norm": 0.8526727509778357, "acc_norm_stderr": 0.009052356615022185 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.9448529411764706, "acc_stderr": 0.01386623773079069, "acc_norm": 0.9448529411764706, "acc_norm_stderr": 0.01386623773079069 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.9150326797385621, "acc_stderr": 0.011280378134152863, "acc_norm": 0.9150326797385621, "acc_norm_stderr": 0.011280378134152863 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.8363636363636363, 
"acc_stderr": 0.03543433054298676, "acc_norm": 0.8363636363636363, "acc_norm_stderr": 0.03543433054298676 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.9020408163265307, "acc_stderr": 0.019030086404305015, "acc_norm": 0.9020408163265307, "acc_norm_stderr": 0.019030086404305015 }, "harness|hendrycksTest-sociology|5": { "acc": 0.9651741293532339, "acc_stderr": 0.012963994249547642, "acc_norm": 0.9651741293532339, "acc_norm_stderr": 0.012963994249547642 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.96, "acc_stderr": 0.01969463855669321, "acc_norm": 0.96, "acc_norm_stderr": 0.01969463855669321 }, "harness|hendrycksTest-virology|5": { "acc": 0.6867469879518072, "acc_stderr": 0.03610805018031025, "acc_norm": 0.6867469879518072, "acc_norm_stderr": 0.03610805018031025 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.9415204678362573, "acc_stderr": 0.017996678857280134, "acc_norm": 0.9415204678362573, "acc_norm_stderr": 0.017996678857280134 }, "harness|truthfulqa:mc|0": { "mc1": 0.42472460220318237, "mc1_stderr": 0.01730400095716748, "mc2": 0.6026945746890666, "mc2_stderr": 0.015339327539715458 }, "harness|winogrande|5": { "acc": 0.8050513022888713, "acc_stderr": 0.011134099415938273 }, "harness|gsm8k|5": { "acc": 0.5966641394996209, "acc_stderr": 0.013512654781814706 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_AA051612__A0125
[ "region:us" ]
2024-01-25T17:37:41+00:00
{"pretty_name": "Evaluation run of AA051612/A0125", "dataset_summary": "Dataset automatically created during the evaluation run of model [AA051612/A0125](https://huggingface.co/AA051612/A0125) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_AA051612__A0125\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T17:35:27.963132](https://huggingface.co/datasets/open-llm-leaderboard/details_AA051612__A0125/blob/main/results_2024-01-25T17-35-27.963132.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.853843249835467,\n \"acc_stderr\": 0.023020303121600947,\n \"acc_norm\": 0.8632142115707717,\n \"acc_norm_stderr\": 0.023353666335398204,\n \"mc1\": 0.42472460220318237,\n \"mc1_stderr\": 0.01730400095716748,\n \"mc2\": 0.6026945746890666,\n \"mc2_stderr\": 0.015339327539715458\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6467576791808873,\n \"acc_stderr\": 0.013967822714840055,\n \"acc_norm\": 0.697098976109215,\n \"acc_norm_stderr\": 0.013428241573185347\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6534554869547898,\n \"acc_stderr\": 0.004748965717214275,\n \"acc_norm\": 0.8500298745269866,\n \"acc_norm_stderr\": 0.0035631244274585\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.8666666666666667,\n \"acc_stderr\": 0.029365879728106857,\n \"acc_norm\": 0.8666666666666667,\n \"acc_norm_stderr\": 0.029365879728106857\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.9276315789473685,\n \"acc_stderr\": 0.021085011261884105,\n \"acc_norm\": 0.9276315789473685,\n \"acc_norm_stderr\": 0.021085011261884105\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.03775251680686371,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.03775251680686371\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.8981132075471698,\n \"acc_stderr\": 0.01861754975827669,\n \"acc_norm\": 0.8981132075471698,\n \"acc_norm_stderr\": 0.01861754975827669\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.9583333333333334,\n \"acc_stderr\": 0.016710315802959976,\n \"acc_norm\": 0.9583333333333334,\n \"acc_norm_stderr\": 0.016710315802959976\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.62,\n 
\"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.036845294917747094,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.036845294917747094\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.8728323699421965,\n \"acc_stderr\": 0.025403262004794074,\n \"acc_norm\": 0.8728323699421965,\n \"acc_norm_stderr\": 0.025403262004794074\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.7352941176470589,\n \"acc_stderr\": 0.043898699568087785,\n \"acc_norm\": 0.7352941176470589,\n \"acc_norm_stderr\": 0.043898699568087785\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.03487350880197771,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.03487350880197771\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.8808510638297873,\n \"acc_stderr\": 0.021178168405396817,\n \"acc_norm\": 0.8808510638297873,\n \"acc_norm_stderr\": 0.021178168405396817\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.7543859649122807,\n \"acc_stderr\": 0.0404933929774814,\n \"acc_norm\": 0.7543859649122807,\n \"acc_norm_stderr\": 0.0404933929774814\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.9172413793103448,\n \"acc_stderr\": 0.022959752132687583,\n \"acc_norm\": 0.9172413793103448,\n \"acc_norm_stderr\": 0.022959752132687583\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.8227513227513228,\n \"acc_stderr\": 0.019667770001273677,\n \"acc_norm\": 0.8227513227513228,\n \"acc_norm_stderr\": 0.019667770001273677\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.6031746031746031,\n \"acc_stderr\": 
0.043758884927270585,\n \"acc_norm\": 0.6031746031746031,\n \"acc_norm_stderr\": 0.043758884927270585\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.047258156262526094,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.047258156262526094\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.9419354838709677,\n \"acc_stderr\": 0.01330413811280927,\n \"acc_norm\": 0.9419354838709677,\n \"acc_norm_stderr\": 0.01330413811280927\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.8078817733990148,\n \"acc_stderr\": 0.02771931570961478,\n \"acc_norm\": 0.8078817733990148,\n \"acc_norm_stderr\": 0.02771931570961478\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.03379976689896309,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.03379976689896309\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.9575757575757575,\n \"acc_stderr\": 0.01573880284887258,\n \"acc_norm\": 0.9575757575757575,\n \"acc_norm_stderr\": 0.01573880284887258\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.9747474747474747,\n \"acc_stderr\": 0.01117803212271851,\n \"acc_norm\": 0.9747474747474747,\n \"acc_norm_stderr\": 0.01117803212271851\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9792746113989638,\n \"acc_stderr\": 0.010281417011909029,\n \"acc_norm\": 0.9792746113989638,\n \"acc_norm_stderr\": 0.010281417011909029\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.8923076923076924,\n \"acc_stderr\": 0.015717188416273085,\n \"acc_norm\": 0.8923076923076924,\n \"acc_norm_stderr\": 0.015717188416273085\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.7185185185185186,\n \"acc_stderr\": 0.027420019350945277,\n \"acc_norm\": 0.7185185185185186,\n \"acc_norm_stderr\": 0.027420019350945277\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.9411764705882353,\n \"acc_stderr\": 0.015283995352038414,\n \"acc_norm\": 0.9411764705882353,\n \"acc_norm_stderr\": 0.015283995352038414\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.6887417218543046,\n \"acc_stderr\": 0.03780445850526733,\n \"acc_norm\": 0.6887417218543046,\n \"acc_norm_stderr\": 0.03780445850526733\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9577981651376147,\n \"acc_stderr\": 0.008619919645779343,\n \"acc_norm\": 0.9577981651376147,\n \"acc_norm_stderr\": 0.008619919645779343\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.8240740740740741,\n \"acc_stderr\": 0.025967420958258533,\n \"acc_norm\": 0.8240740740740741,\n \"acc_norm_stderr\": 0.025967420958258533\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9754901960784313,\n \"acc_stderr\": 0.010852588947505647,\n \"acc_norm\": 0.9754901960784313,\n \"acc_norm_stderr\": 0.010852588947505647\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.9662447257383966,\n \"acc_stderr\": 0.011755967781486706,\n \"acc_norm\": 0.9662447257383966,\n \"acc_norm_stderr\": 0.011755967781486706\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.8923766816143498,\n \"acc_stderr\": 0.020799400082880008,\n \"acc_norm\": 0.8923766816143498,\n \"acc_norm_stderr\": 0.020799400082880008\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.9312977099236641,\n \"acc_stderr\": 0.022184936922745042,\n \"acc_norm\": 0.9312977099236641,\n \"acc_norm_stderr\": 0.022184936922745042\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.9504132231404959,\n \"acc_stderr\": 0.01981748563352363,\n \"acc_norm\": 0.9504132231404959,\n \"acc_norm_stderr\": 0.01981748563352363\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.9351851851851852,\n \"acc_stderr\": 
0.023800937426629205,\n \"acc_norm\": 0.9351851851851852,\n \"acc_norm_stderr\": 0.023800937426629205\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.9447852760736196,\n \"acc_stderr\": 0.017944712448654625,\n \"acc_norm\": 0.9447852760736196,\n \"acc_norm_stderr\": 0.017944712448654625\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.7678571428571429,\n \"acc_stderr\": 0.04007341809755806,\n \"acc_norm\": 0.7678571428571429,\n \"acc_norm_stderr\": 0.04007341809755806\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.9223300970873787,\n \"acc_stderr\": 0.026501440784762752,\n \"acc_norm\": 0.9223300970873787,\n \"acc_norm_stderr\": 0.026501440784762752\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9743589743589743,\n \"acc_stderr\": 0.010354979197709018,\n \"acc_norm\": 0.9743589743589743,\n \"acc_norm_stderr\": 0.010354979197709018\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.91,\n \"acc_stderr\": 0.028762349126466143,\n \"acc_norm\": 0.91,\n \"acc_norm_stderr\": 0.028762349126466143\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.9540229885057471,\n \"acc_stderr\": 0.00748938941749882,\n \"acc_norm\": 0.9540229885057471,\n \"acc_norm_stderr\": 0.00748938941749882\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.8641618497109826,\n \"acc_stderr\": 0.01844585866997476,\n \"acc_norm\": 0.8641618497109826,\n \"acc_norm_stderr\": 0.01844585866997476\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.8927374301675978,\n \"acc_stderr\": 0.010349448587660392,\n \"acc_norm\": 0.8927374301675978,\n \"acc_norm_stderr\": 0.010349448587660392\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.9215686274509803,\n \"acc_stderr\": 0.015394260411062108,\n \"acc_norm\": 0.9215686274509803,\n \"acc_norm_stderr\": 0.015394260411062108\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.9035369774919614,\n \"acc_stderr\": 
0.016767663560541792,\n \"acc_norm\": 0.9035369774919614,\n \"acc_norm_stderr\": 0.016767663560541792\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.9290123456790124,\n \"acc_stderr\": 0.014288969013346262,\n \"acc_norm\": 0.9290123456790124,\n \"acc_norm_stderr\": 0.014288969013346262\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.8014184397163121,\n \"acc_stderr\": 0.02379830163794211,\n \"acc_norm\": 0.8014184397163121,\n \"acc_norm_stderr\": 0.02379830163794211\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.8526727509778357,\n \"acc_stderr\": 0.009052356615022185,\n \"acc_norm\": 0.8526727509778357,\n \"acc_norm_stderr\": 0.009052356615022185\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.9448529411764706,\n \"acc_stderr\": 0.01386623773079069,\n \"acc_norm\": 0.9448529411764706,\n \"acc_norm_stderr\": 0.01386623773079069\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.9150326797385621,\n \"acc_stderr\": 0.011280378134152863,\n \"acc_norm\": 0.9150326797385621,\n \"acc_norm_stderr\": 0.011280378134152863\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.8363636363636363,\n \"acc_stderr\": 0.03543433054298676,\n \"acc_norm\": 0.8363636363636363,\n \"acc_norm_stderr\": 0.03543433054298676\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.9020408163265307,\n \"acc_stderr\": 0.019030086404305015,\n \"acc_norm\": 0.9020408163265307,\n \"acc_norm_stderr\": 0.019030086404305015\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.9651741293532339,\n \"acc_stderr\": 0.012963994249547642,\n \"acc_norm\": 0.9651741293532339,\n \"acc_norm_stderr\": 0.012963994249547642\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.96,\n \"acc_stderr\": 0.01969463855669321,\n \"acc_norm\": 0.96,\n \"acc_norm_stderr\": 0.01969463855669321\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 
0.6867469879518072,\n \"acc_stderr\": 0.03610805018031025,\n \"acc_norm\": 0.6867469879518072,\n \"acc_norm_stderr\": 0.03610805018031025\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.9415204678362573,\n \"acc_stderr\": 0.017996678857280134,\n \"acc_norm\": 0.9415204678362573,\n \"acc_norm_stderr\": 0.017996678857280134\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.42472460220318237,\n \"mc1_stderr\": 0.01730400095716748,\n \"mc2\": 0.6026945746890666,\n \"mc2_stderr\": 0.015339327539715458\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8050513022888713,\n \"acc_stderr\": 0.011134099415938273\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5966641394996209,\n \"acc_stderr\": 0.013512654781814706\n }\n}\n```", "repo_url": "https://huggingface.co/AA051612/A0125", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|arc:challenge|25_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|gsm8k|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hellaswag|10_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T17-35-27.963132.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T17-35-27.963132.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T17-35-27.963132.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T17-35-27.963132.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T17-35-27.963132.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T17-35-27.963132.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T17-35-27.963132.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": 
"2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": 
"2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T17-35-27.963132.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["**/details_harness|winogrande|5_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T17-35-27.963132.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T17_35_27.963132", "path": ["results_2024-01-25T17-35-27.963132.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T17-35-27.963132.parquet"]}]}]}
2024-01-25T17:38:03+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of AA051612/A0125 Dataset automatically created during the evaluation run of model AA051612/A0125 on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T17:35:27.963132(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of AA051612/A0125\n\n\n\nDataset automatically created during the evaluation run of model AA051612/A0125 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T17:35:27.963132(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of AA051612/A0125\n\n\n\nDataset automatically created during the evaluation run of model AA051612/A0125 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T17:35:27.963132(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 175, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of AA051612/A0125\n\n\n\nDataset automatically created during the evaluation run of model AA051612/A0125 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T17:35:27.963132(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.0585547499358654, 0.18442709743976593, -0.00663952250033617, 0.035594213753938675, 0.05860947445034981, 0.0070253899320960045, 0.059285666793584824, 0.11301208287477493, 0.03384333848953247, 0.15427902340888977, 0.011304997839033604, 0.10393115878105164, 0.11053825169801712, 0.12270486354827881, 0.011640781536698341, -0.13831490278244019, 0.0571211539208889, -0.07854854315519333, 0.10314593464136124, 0.07722723484039307, 0.077605240046978, -0.0796419307589531, 0.05614931508898735, -0.03403089940547943, 0.007118558045476675, -0.009999857284128666, -0.052325524389743805, -0.04784518852829933, 0.10250381380319595, 0.06950937211513519, 0.046973489224910736, -0.026746610179543495, 0.02179837040603161, -0.28535211086273193, 0.020639942958950996, 0.10509053617715836, -0.0006371984491124749, 0.05570761114358902, 0.14700335264205933, -0.09036074578762054, 0.10787325352430344, -0.027339085936546326, 0.07611988484859467, 0.04770583286881447, -0.13555927574634552, -0.16472066938877106, -0.15566784143447876, 0.019352152943611145, 0.07158302515745163, 0.04178560525178909, -0.024815799668431282, 0.0984184667468071, -0.04277294874191284, 0.05177632346749306, 0.15201519429683685, -0.1829034686088562, -0.025372125208377838, 0.05239153280854225, 0.020022043958306313, 0.04530053958296776, -0.08557899296283722, -0.03236991912126541, 0.03266949579119682, 0.050999853760004044, 0.009233509190380573, 0.010355167090892792, 0.06740505248308182, 0.016934027895331383, -0.14275093376636505, -0.13575249910354614, 0.1093887984752655, -0.017307095229625702, -0.04549873620271683, -0.16527459025382996, -0.0550706647336483, -0.03882472589612007, 0.016063885763287544, 0.020846210420131683, 0.015811799094080925, 0.00018743801047094166, 0.06675217300653458, 0.002083771163597703, -0.08814723044633865, -0.044236939400434494, -0.04832698404788971, 0.023067785426974297, 0.03653009980916977, 0.0036408782470971346, -0.003699373686686158, 0.14330297708511353, 0.027905462309718132, -0.06864476203918457, 
-0.09905470162630081, -0.04180813580751419, -0.1258390247821808, -0.03727750480175018, 0.02068931609392166, -0.044967811554670334, 0.04431283101439476, 0.2441171556711197, -0.05122624337673187, 0.03681153431534767, -0.08371920138597488, 0.022099114954471588, 0.11114282160997391, 0.08232283592224121, -0.0535915233194828, -0.07217180728912354, -0.05297550931572914, 0.022225147113204002, 0.026083534583449364, -0.03264787420630455, 0.03222595155239105, 0.0675799548625946, 0.03045196831226349, 0.13228146731853485, 0.12015125900506973, 0.018705738708376884, -0.07494128495454788, -0.013343674130737782, 0.11958380043506622, -0.18239843845367432, 0.006616492755711079, 0.024206576868891716, -0.036561816930770874, -0.07511280477046967, 0.06051704287528992, -0.029237279668450356, -0.07864970713853836, 0.10344894230365753, -0.06327662616968155, -0.05758407711982727, -0.1111508160829544, -0.0744805634021759, 0.03976762294769287, -0.032945238053798676, -0.06668246537446976, -0.04160115122795105, -0.15155991911888123, -0.08832844346761703, 0.03190591558814049, -0.07540556788444519, -0.013513227924704552, 0.0005827618879266083, 0.02426326461136341, -0.023814652115106583, -0.008711503818631172, 0.10088876634836197, -0.07427037507295609, 0.04481487348675728, -0.03629697859287262, 0.05051158368587494, 0.0950559750199318, 0.03328745439648628, -0.134152352809906, 0.09554128348827362, -0.08035245537757874, 0.10442384332418442, -0.07473021745681763, -0.029008297249674797, -0.11867690831422806, 0.0056702797301113605, -0.015402543358504772, 0.01862960495054722, -0.007088520564138889, 0.08726312220096588, -0.2213268131017685, -0.01222533080726862, 0.18991348147392273, -0.13654467463493347, -0.06825800985097885, 0.055274851620197296, -0.030209098011255264, 0.05325324088335037, 0.05037757009267807, 0.0691705197095871, 0.07899846136569977, -0.058424994349479675, -0.09894305467605591, -0.06534678488969803, -0.040494974702596664, 0.14091642200946808, 0.06299430131912231, -0.09050460904836655, 
0.09081089496612549, 0.02299457974731922, 0.0017895617056638002, -0.061925772577524185, -0.017993463203310966, -0.05932492017745972, 0.002364593558013439, -0.010849210433661938, -0.09605273604393005, -0.02130655199289322, -0.10975125432014465, -0.014647969976067543, -0.06370388716459274, 0.005065031815320253, 0.09471162408590317, -0.03208348900079727, 0.025540413334965706, -0.09469636529684067, 0.08067052811384201, -0.01300421729683876, 0.020039131864905357, -0.2107890099287033, -0.08430427312850952, 0.022675277665257454, -0.16773244738578796, 0.04556290805339813, 0.009787056595087051, 0.013020792044699192, 0.0441419817507267, 0.002509849611669779, 0.018541507422924042, 0.01842958852648735, -0.017541727051138878, -0.00015451096987817436, -0.15957315266132355, -0.034438442438840866, -0.06687574833631516, 0.08397798985242844, -0.11525379121303558, -0.018412837758660316, 0.10299014300107956, 0.15033559501171112, 0.010552093386650085, -0.07984074205160141, 0.039587490260601044, 0.02481667324900627, -0.05859579145908356, -0.0625762939453125, 0.014073319733142853, 0.0015836177626624703, 0.03196490556001663, 0.07343727350234985, -0.1955830454826355, -0.16776156425476074, 0.09063279628753662, 0.10708532482385635, -0.07606807351112366, -0.08496564626693726, -0.062039680778980255, -0.052302855998277664, -0.10819682478904724, -0.04495978727936745, 0.08168861269950867, 0.08643577247858047, 0.057241570204496384, -0.0708656758069992, -0.06141377612948418, -0.015121960081160069, 0.02970392256975174, -0.07104760408401489, 0.10532235354185104, 0.08529406040906906, -0.09239748865365982, 0.1082969531416893, -0.0162559375166893, 0.10945971310138702, 0.10879228264093399, 0.006567095872014761, -0.11930949240922928, -0.014637777581810951, 0.0715903788805008, 0.0397842712700367, 0.07923372089862823, -0.0173508133739233, 0.041434112936258316, 0.08430889248847961, 0.0008580670109950006, 0.032824479043483734, -0.09162221103906631, 0.03217216208577156, 0.043680693954229355, 
-0.0162647794932127, -0.0026401213835924864, -0.0011095914524048567, 0.028970086947083473, 0.09451994299888611, 0.010325507260859013, 0.061824899166822433, -0.012791171669960022, -0.050377752631902695, -0.09414074569940567, 0.1519349068403244, -0.08709553629159927, -0.21677497029304504, -0.15701374411582947, -0.06819543987512589, -0.013964069075882435, -0.009880944155156612, 0.04755771905183792, 0.0074326409958302975, -0.09407215565443039, -0.10560773313045502, 0.03996899351477623, 0.03149649500846863, -0.11512164026498795, -0.03898627310991287, 0.010764295235276222, -0.0020382062066346407, -0.1637575775384903, 0.02093426324427128, 0.029692959040403366, -0.07426157593727112, 0.02843596786260605, 0.08488186448812485, 0.11575815081596375, 0.11982141435146332, 0.0625733807682991, -0.010048751719295979, -0.015206231735646725, 0.17435376346111298, -0.11209605634212494, 0.016387660056352615, 0.09846188873052597, -0.05189842730760574, 0.08274780213832855, 0.15315912663936615, 0.004940715152770281, -0.0867191031575203, 0.03214661404490471, 0.09457072615623474, -0.06831426173448563, -0.27456703782081604, -0.05963926762342453, -0.02936466969549656, 0.05887915939092636, 0.11627808958292007, 0.07930252701044083, -0.013631782494485378, 0.021354034543037415, -0.11188585311174393, -0.04354550316929817, -0.027226753532886505, 0.0714171752333641, 0.05688247084617615, -0.00653422623872757, 0.054579757153987885, -0.06161678209900856, 0.03960622102022171, 0.12161250412464142, 0.02269612066447735, 0.18265950679779053, -0.05168185755610466, 0.18444348871707916, 0.10956321656703949, 0.07373236119747162, -0.012483821250498295, 0.08239799737930298, -0.01948765106499195, 0.07203757017850876, -0.010476833209395409, -0.08616112917661667, -0.03831332176923752, 0.08785190433263779, 0.030781395733356476, -0.020500212907791138, 0.05679420381784439, -0.057378675788640976, 0.06075429171323776, 0.26635250449180603, 0.02330641634762287, -0.17428898811340332, -0.020491009578108788, 
0.04627883806824684, -0.041422899812459946, -0.10590283572673798, -0.0016539540374651551, 0.0703834518790245, -0.15242694318294525, 0.050398629158735275, -0.03473932668566704, 0.08242128044366837, -0.16189908981323242, -0.03971930965781212, 0.005763587541878223, 0.0582420639693737, -0.039033323526382446, 0.09854172170162201, -0.19929610192775726, 0.09835158288478851, -0.014844322577118874, 0.025474293157458305, -0.07163941115140915, 0.07011531293392181, -0.005659577436745167, -0.08190000802278519, 0.13935662806034088, -0.0005593967507593334, -0.11301298439502716, -0.06086554750800133, -0.10233943164348602, -0.015837887302041054, 0.02948845736682415, -0.10160243511199951, 0.1195528656244278, 0.004036021418869495, -0.008412267081439495, -0.05348954349756241, -0.007138444110751152, -0.05738883092999458, -0.2003217339515686, 0.08603649586439133, -0.10609335452318192, 0.057606130838394165, -0.048766959458589554, -0.03779544681310654, -0.04267655313014984, 0.14792580902576447, -0.15434987843036652, -0.07831313461065292, -0.10496420413255692, -0.015491551719605923, 0.12034711241722107, -0.06690026819705963, 0.061207398772239685, -0.04202750325202942, 0.17404775321483612, -0.026782717555761337, -0.046737950295209885, 0.017513833940029144, -0.06423065066337585, -0.1738256812095642, -0.03432130068540573, 0.12292912602424622, 0.06375958770513535, 0.012156559154391289, -0.002419378375634551, 0.07026185840368271, -0.002332193311303854, -0.09774325042963028, 0.05108768492937088, 0.11012715846300125, 0.09643707424402237, 0.06444935500621796, -0.020134788006544113, -0.09709049761295319, -0.09455622732639313, -0.07822694629430771, 0.06814686954021454, 0.19601702690124512, -0.044643670320510864, 0.11518179625272751, 0.12281405925750732, -0.11554859578609467, -0.2005811184644699, -0.0770924836397171, -0.029137438163161278, -0.010684615932404995, 0.11164354532957077, -0.20440003275871277, 0.03608215972781181, 0.09061041474342346, -0.019350048154592514, 0.12043632566928864, 
-0.2523142993450165, -0.1299072504043579, 0.07456368207931519, 0.04788214713335037, -0.15234310925006866, -0.12259519100189209, -0.09648651629686356, -0.024119528010487556, -0.12155436724424362, 0.1125529557466507, -0.027515113353729248, 0.04300139844417572, -0.018210871145129204, 0.054060760885477066, 0.04100298509001732, -0.05657918006181717, 0.1459464132785797, -0.021666202694177628, 0.04858158901333809, -0.09647803008556366, -0.007835578173398972, -0.029974909499287605, -0.044741641730070114, 0.06538978219032288, 0.004136978182941675, 0.03142731264233589, -0.048131540417671204, -0.04974186047911644, -0.03668138012290001, 0.02803097292780876, -0.07159828394651413, -0.05820607393980026, -0.06586656719446182, 0.06750716269016266, 0.06644595414400101, -0.01635625585913658, 0.010712577030062675, -0.06280738115310669, 0.05887316167354584, 0.22046029567718506, 0.07954061776399612, 0.060351233929395676, -0.11950582265853882, -0.044065285474061966, -0.004706075880676508, 0.015047059394419193, -0.0922188088297844, 0.04886426404118538, 0.09210584312677383, 0.04168730974197388, 0.12072372436523438, 0.0006807087338529527, -0.2051129788160324, -0.006852581165730953, 0.07982948422431946, -0.10396208614110947, -0.1843731552362442, 0.052740052342414856, 0.036629654467105865, -0.11713047325611115, -0.09659644961357117, 0.08020393550395966, 0.030982669442892075, -0.015246465802192688, 0.021029017865657806, 0.07564375549554825, 0.03423670306801796, 0.10061857849359512, -0.029660943895578384, 0.052500877529382706, -0.08510704338550568, 0.12647630274295807, 0.13791678845882416, -0.11362165957689285, -0.004557546228170395, 0.07263804227113724, -0.03580177202820778, -0.052846405655145645, -0.04430161789059639, 0.056562334299087524, -0.008073083125054836, -0.053839508444070816, -0.020724887028336525, -0.08253100514411926, 0.0846797302365303, 0.15148137509822845, 0.0032831733115017414, 0.08582905679941177, 0.026427200064063072, -0.008735944516956806, -0.04965841770172119, 
0.11450782418251038, 0.03263600543141365, 0.03639579191803932, -0.018891654908657074, 0.04613472521305084, 0.0227518267929554, -0.015062051825225353, 0.02048693783581257, -0.06426136195659637, -0.05176861584186554, 0.022011850029230118, -0.15454192459583282, 0.030342696234583855, -0.08832427114248276, -0.016123401001095772, 0.009991317987442017, 0.008335194550454617, -0.0015814638463780284, 0.016496550291776657, -0.05294477194547653, -0.04715731367468834, -0.047025665640830994, 0.11132711917161942, -0.20598721504211426, 0.005838008597493172, 0.08430753648281097, -0.08148841559886932, 0.07958143949508667, -0.012398035265505314, -0.021950410678982735, -0.00037249087472446263, -0.08203750848770142, -0.008632502518594265, -0.02185857482254505, 0.029264718294143677, 0.01054723933339119, -0.16005899012088776, -0.012439517304301262, 0.004132099449634552, -0.10729440301656723, 0.0006362991407513618, 0.0014361923094838858, -0.14451999962329865, 0.06584134697914124, 0.09609251469373703, -0.0469207689166069, -0.032432492822408676, 0.034026991575956345, 0.045970428735017776, 0.01637326367199421, 0.09937124699354172, -0.02270362339913845, 0.03886071592569351, -0.16100741922855377, -0.033038411289453506, 0.008730516768991947, 0.00739761907607317, 0.04275763779878616, 0.00901502650231123, 0.03978821635246277, -0.024954691529273987, 0.23090860247612, -0.01659931242465973, -0.04864376038312912, 0.03508225083351135, 0.018445486202836037, -0.06826004385948181, 0.02989770472049713, 0.009813321754336357, 0.006897841580212116, 0.009323740378022194, 0.008865615352988243, -0.01895974762737751, -0.05064485967159271, 0.03558213263750076, 0.11747954040765762, 0.11043111234903336, 0.23560284078121185, -0.03934815153479576, 0.03195769712328911, -0.13926775753498077, -0.0524611696600914, 0.012582151219248772, -0.04738370329141617, 0.050178106874227524, -0.048316847532987595, 0.06669829785823822, 0.1311764121055603, -0.13582934439182281, 0.13075529038906097, -0.04274072125554085, 
-0.026867106556892395, -0.08137490600347519, -0.1996380239725113, -0.03438754379749298, 0.017162637785077095, 0.0007472258876077831, -0.09396766126155853, 0.10548536479473114, 0.16509750485420227, 0.014355874620378017, -0.008078152313828468, 0.06411013007164001, -0.1142348125576973, -0.06762782484292984, -0.03660356253385544, 0.028218869119882584, 0.04381527751684189, 0.004791740328073502, 0.04785959795117378, 0.012044250033795834, 0.05924654379487038, 0.08945833146572113, 0.08858785778284073, 0.0781329870223999, 0.0570266954600811, -0.035505473613739014, -0.04900654777884483, 0.002440220909193158, -0.021672889590263367, -0.03803297504782677, 0.17744925618171692, 0.05716217681765556, 0.03498563915491104, 0.02448752149939537, 0.20293085277080536, -0.009722167626023293, -0.06876105070114136, -0.12949882447719574, 0.15857693552970886, -0.0027589588426053524, 0.019766025245189667, 0.029972026124596596, -0.12364555150270462, 0.020988624542951584, 0.13995108008384705, 0.08947563171386719, 0.017683256417512894, 0.011436752043664455, 0.030419636517763138, 0.023732228204607964, -0.022988880053162575, 0.021235259249806404, 0.03361143171787262, 0.20326456427574158, -0.04944811388850212, 0.08904045075178146, -0.03417430818080902, -0.020588524639606476, -0.0225616954267025, 0.10634005814790726, -0.04327269643545151, 0.02107989974319935, -0.07245777547359467, 0.07791135460138321, -0.05670524761080742, -0.2203660011291504, 0.0338837131857872, -0.0416642464697361, -0.13261465728282928, 0.00026068853912875056, 0.04089108482003212, -0.018458839505910873, 0.03705886751413345, 0.03692173585295677, -0.019023701548576355, 0.17789241671562195, 0.014678968116641045, -0.06942512840032578, -0.09925587475299835, 0.07516228407621384, -0.04842625930905342, 0.27893275022506714, 0.001928569283336401, 0.029018862172961235, 0.08476567268371582, -0.0036500028800219297, -0.13479700684547424, 0.0552326962351799, 0.08360801637172699, -0.05904638022184372, 0.038704764097929, 0.12271421402692795, 
-0.012366966344416142, 0.11519517749547958, 0.06107115000486374, 0.003264371771365404, 0.06661509722471237, 0.03701312094926834, 0.006339513696730137, -0.08280337601900101, 0.04829861596226692, -0.08258246630430222, 0.11498309671878815, 0.14194366335868835, -0.01933828368782997, 0.014268491417169571, -0.05718740075826645, 0.04122404754161835, -0.04677421599626541, 0.0882822722196579, 0.0019520620116963983, -0.12879861891269684, 0.06685195863246918, 0.03158913552761078, 0.06972482800483704, -0.1925683170557022, -0.07159881293773651, 0.09093067795038223, -0.057342689484357834, -0.02850896120071411, 0.09110692888498306, 0.03443985804915428, 0.033960819244384766, -0.059360306710004807, -0.0964927077293396, 0.02454773336648941, 0.10069461166858673, -0.06032675877213478, -0.0479276068508625 ]
bcf6f5169ff23a1519efbf56f6bdd6815355d90c
# local/GAIR-lima This dataset is a [Lilac](http://lilacml.com) processed dataset. Original dataset: [https://huggingface.co/datasets/GAIR/lima](https://huggingface.co/datasets/GAIR/lima) To download the dataset to a local directory: ```bash lilac download lilacai/GAIR-lima-local-GAIR-lima ``` or from python with: ```py ll.download("lilacai/GAIR-lima-local-GAIR-lima") ```
lilacai/GAIR-lima-local-GAIR-lima
[ "Lilac", "region:us" ]
2024-01-25T17:52:40+00:00
{"tags": ["Lilac"]}
2024-01-25T18:43:58+00:00
[]
[]
TAGS #Lilac #region-us
# local/GAIR-lima This dataset is a Lilac processed dataset. Original dataset: URL To download the dataset to a local directory: or from python with:
[ "# local/GAIR-lima\nThis dataset is a Lilac processed dataset. Original dataset: URL\n\nTo download the dataset to a local directory:\n\n\n\nor from python with:" ]
[ "TAGS\n#Lilac #region-us \n", "# local/GAIR-lima\nThis dataset is a Lilac processed dataset. Original dataset: URL\n\nTo download the dataset to a local directory:\n\n\n\nor from python with:" ]
[ 9, 41 ]
[ "passage: TAGS\n#Lilac #region-us \n# local/GAIR-lima\nThis dataset is a Lilac processed dataset. Original dataset: URL\n\nTo download the dataset to a local directory:\n\n\n\nor from python with:" ]
[ -0.05509041249752045, 0.3097880780696869, -0.0055983224883675575, 0.10126819461584091, 0.02712797001004219, 0.07410131394863129, -0.019595561549067497, 0.10938216000795364, 0.12959715723991394, 0.02772054262459278, 0.16404713690280914, 0.07594393193721771, -0.016766397282481194, 0.023295415565371513, -0.024605128914117813, -0.18105201423168182, -0.009243995882570744, 0.03872361406683922, -0.06090395525097847, 0.013413157314062119, 0.0681186392903328, -0.004480965435504913, 0.1451699435710907, -0.037172891199588776, -0.04735418036580086, 0.08166898787021637, -0.040877413004636765, 0.04517597705125809, 0.0252371896058321, -0.040491145104169846, 0.091406911611557, -0.06964583694934845, 0.036070264875888824, -0.21093523502349854, 0.006681954953819513, -0.009276635013520718, -0.01672213524580002, 0.011201591230928898, 0.05670816823840141, -0.18182535469532013, 0.19212959706783295, -0.06092001497745514, 0.04729987680912018, 0.03869686648249626, -0.10288914293050766, -0.17134949564933777, -0.011641125194728374, -0.08206268399953842, -0.010658961720764637, 0.0641273632645607, 0.011545461602509022, 0.09623344987630844, -0.12064940482378006, 0.07370753586292267, 0.003415970364585519, -0.09125553071498871, -0.007842636667191982, 0.08005590736865997, 0.02270212210714817, 0.15619756281375885, -0.030848221853375435, 0.02328595705330372, -0.02482791244983673, 0.02867068536579609, -0.07850965857505798, -0.1287514567375183, -0.27376848459243774, 0.06939887255430222, -0.09181739389896393, 0.014310927130281925, 0.4302481412887573, -0.05359352380037308, 0.013044821098446846, 0.10106329619884491, -0.055073950439691544, 0.06301058828830719, -0.02376597188413143, 0.11632122099399567, 0.012691588141024113, 0.1051698550581932, -0.009564676322042942, -0.09135305136442184, -0.06779497861862183, -0.0597146674990654, -0.07116817682981491, -0.04598626494407654, 0.04220519959926605, 0.18810595571994781, -0.14010989665985107, -0.041251055896282196, 0.09401741623878479, -0.012585507705807686, 
0.009370233863592148, -0.0495075061917305, 0.043926287442445755, 0.013256097212433815, -0.04004104435443878, 0.07467422634363174, 0.10784468054771423, 0.13420234620571136, 0.0732601210474968, 0.03269774466753006, -0.08453844487667084, 0.07864945381879807, 0.13652347028255463, 0.0199432410299778, -0.024470889940857887, -0.0009857758414000273, 0.02708626165986061, -0.16861526668071747, 0.034017473459243774, -0.031122375279664993, -0.04278697445988655, 0.0632830262184143, -0.22115205228328705, 0.1483147293329239, 0.02822474017739296, -0.04106168821454048, -0.06331021338701248, 0.00454946793615818, -0.03143778443336487, 0.013475630432367325, 0.04504988715052605, 0.046667806804180145, -0.06417527049779892, 0.05736243352293968, -0.020359942689538002, -0.0031840165611356497, -0.001603411277756095, -0.09720178693532944, -0.16235962510108948, 0.03620895743370056, 0.041636671870946884, 0.025908051058650017, 0.05061899125576019, -0.03754451498389244, 0.04718336462974548, -0.1005391925573349, -0.17875997722148895, 0.024083079770207405, 0.05195005610585213, -0.06556505709886551, 0.11554183810949326, 0.0372137613594532, 0.11570676416158676, 0.024966401979327202, 0.08653249591588974, -0.2041071057319641, -0.08122703433036804, 0.046872083097696304, -0.08419951796531677, 0.07105059176683426, -0.22182442247867584, 0.001950109493918717, -0.17816917598247528, -0.023995203897356987, 0.14650151133537292, 0.10020966082811356, -0.06585518270730972, 0.0829298347234726, -0.11116479337215424, -0.012742643244564533, 0.046079207211732864, -0.07073605805635452, -0.020801695063710213, 0.10090028494596481, -0.3151422142982483, 0.06570238620042801, 0.08363594114780426, -0.05976836383342743, -0.036890070885419846, 0.07164608687162399, -0.0062524001114070415, 0.028919074684381485, 0.0950050950050354, 0.38556480407714844, 0.018297363072633743, -0.004766043741255999, 0.02100350148975849, 0.1014103889465332, -0.04672813415527344, -0.1050131767988205, 0.09096633642911911, -0.052519895136356354, 
-0.19008439779281616, 0.036322277039289474, -0.08024071156978607, 0.07650084048509598, -0.028354963287711143, -0.09118055552244186, -0.060335248708724976, -0.1342577338218689, -0.1185302734375, 0.043708205223083496, 0.037702590227127075, 0.02613929472863674, 0.10905121266841888, -0.05625493451952934, 0.14850063621997833, -0.05549554526805878, 0.010148867033421993, 0.04391590133309364, 0.11305465549230576, 0.0321294330060482, -0.01640392653644085, -0.07473624497652054, -0.06640184670686722, 0.01954825036227703, 0.05883327126502991, -0.014798610471189022, -0.1723846197128296, -0.005137566011399031, 0.012368719093501568, 0.08193527907133102, 0.13925239443778992, 0.03960161656141281, 0.005278981756418943, 0.033717721700668335, -0.08091296255588531, 0.03189469128847122, -0.018767645582556725, 0.12223300337791443, -0.1316501349210739, -0.05076334998011589, 0.007288116961717606, 0.02569267340004444, 0.009604571387171745, -0.0757613480091095, 0.05505398288369179, -0.016622701659798622, 0.016871679574251175, -0.05760813131928444, 0.03577166050672531, 0.011212115176022053, -0.015991361811757088, 0.03748872131109238, 0.11355715990066528, -0.08113561570644379, 0.04617522656917572, 0.01805029809474945, 0.023541709408164024, 0.03709697723388672, -0.01442230399698019, 0.02463577874004841, 0.012113751843571663, 0.07772982865571976, -0.010352743789553642, -0.007977704517543316, 0.06178092956542969, -0.04581138491630554, 0.027664415538311005, -0.034412823617458344, -0.048889804631471634, -0.02153231017291546, 0.07818859815597534, 0.28192731738090515, -0.24085718393325806, 0.0475924015045166, 0.057648345828056335, 0.027856791391968727, 0.14553888142108917, -0.022692589089274406, -0.07300729304552078, -0.07583650201559067, -0.03645041212439537, -0.006232140120118856, 0.10572336614131927, -0.007303064689040184, 0.04758177325129509, 0.03387191519141197, -0.07474347203969955, 0.02838658168911934, -0.023654047399759293, -0.07178322225809097, -0.056965041905641556, -0.031301990151405334, 
-0.1584700644016266, 0.058284688740968704, -0.03656698018312454, -0.0029664235189557076, -0.062223244458436966, 0.008736766874790192, -0.028295814990997314, 0.05259200558066368, 0.01462988369166851, 0.1568821668624878, -0.19256694614887238, -0.18618357181549072, -0.07087220996618271, -0.0734463557600975, -0.037146132439374924, -0.0056565795093774796, 0.027508947998285294, -0.061623357236385345, -0.03600135073065758, -0.04461749270558357, 0.08171392232179642, -0.1567835956811905, -0.05386839807033539, -0.2203911989927292, 0.007627489045262337, -0.06116686388850212, -0.15484122931957245, 0.03029867634177208, -0.039112236350774765, 0.0104019520804286, 0.03245541825890541, -0.10949209332466125, 0.07648426294326782, 0.04178265109658241, 0.10204308480024338, 0.032063987106084824, 0.019872475415468216, 0.2525540888309479, -0.05663398280739784, 0.007875453680753708, -0.01297725923359394, -0.109613336622715, -0.005311372224241495, 0.06357721984386444, 0.10323089361190796, -0.20877821743488312, -0.04275984689593315, 0.0415465384721756, -0.05446728691458702, -0.18635015189647675, -0.20138351619243622, -0.06467930227518082, 0.0781589150428772, 0.11197903752326965, 0.09013250470161438, -0.19919823110103607, 0.04429923743009567, 0.05610758438706398, -0.005573443602770567, -0.20082704722881317, -0.015982963144779205, 0.1072254627943039, -0.06919599324464798, -0.006768286228179932, -0.10641073435544968, -0.050177786499261856, 0.11738434433937073, 0.11143297702074051, 0.19864465296268463, 0.048919010907411575, 0.13014937937259674, 0.07610353082418442, 0.24016663432121277, 0.030262891203165054, -0.011472309939563274, 0.04686644300818443, 0.04112076386809349, -0.0069137075915932655, -0.03756478801369667, 0.047672610729932785, -0.05311210826039314, 0.08375418931245804, -0.1382986307144165, 0.007890029810369015, -0.11754079908132553, 0.036326825618743896, 0.05326853320002556, 0.0183210801333189, -0.21075809001922607, 0.08089429140090942, 0.051769085228443146, 0.11620830744504929, 
-0.10265917330980301, 0.07493839412927628, 0.03581224009394646, -0.044774606823921204, -0.03205783665180206, 0.027969539165496826, 0.06048094853758812, -0.19229243695735931, -0.07510671764612198, 0.048860639333724976, -0.13015586137771606, 0.02495996654033661, 0.0570448637008667, 0.11988290399312973, 0.11554623395204544, 0.07328623533248901, -0.05927398055791855, 0.010510541498661041, -0.07192463427782059, -0.026627222076058388, 0.13372096419334412, 0.09456900507211685, 0.11529945582151413, -0.07001751661300659, -0.16652031242847443, -0.15545275807380676, 0.10987291485071182, 0.05763490870594978, -0.022894056513905525, -0.03130779787898064, -0.07274946570396423, 0.031922806054353714, -0.04362724721431732, -0.18187296390533447, -0.12820376455783844, -0.19248275458812714, -0.004743530880659819, 0.1782410442829132, -0.13645039498806, 0.0327906459569931, 0.0340426079928875, 0.3068735599517822, 0.09560643136501312, 0.058356549590826035, -0.03305764123797417, -0.06580639630556107, -0.039256494492292404, 0.2563002109527588, -0.02232307940721512, 0.0709519013762474, -0.043830517679452896, -0.10292870551347733, 0.01814206689596176, -0.14775879681110382, -0.033845290541648865, -0.046777334064245224, 0.12300366908311844, -0.06601573526859283, 0.0919097438454628, 0.0813254863023758, -0.03101167269051075, 0.03188663348555565, 0.06559786200523376, -0.12593506276607513, -0.10276380926370621, -0.015230345539748669, 0.05321177467703819, -0.05377552658319473, 0.12808287143707275, -0.08250994980335236, 0.041564151644706726, -0.011204973794519901, -0.005818564910441637, 0.22977161407470703, -0.013969231396913528, -0.0932464748620987, 0.06998326629400253, 0.12135206162929535, -0.11074605584144592, -0.1074402779340744, -0.012843158096075058, -0.0904310867190361, -0.09649353474378586, 0.016057034954428673, -0.2725088596343994, 0.18167249858379364, 0.13965481519699097, -0.025744983926415443, 0.21621926128864288, -0.36365699768066406, 0.04452883452177048, 0.1750117987394333, 
-0.016122933477163315, 0.2010226547718048, -0.19881907105445862, -0.0701797679066658, -0.009853123687207699, 0.002026301808655262, 0.2422093152999878, -0.15692315995693207, 0.09103666245937347, -0.08187504857778549, 0.09054016321897507, 0.055729929357767105, 0.03881194442510605, 0.13737180829048157, 0.0203956738114357, 0.06968477368354797, 0.001525991247035563, -0.052762437611818314, 0.058969028294086456, 0.012445634230971336, 0.09891481697559357, -0.06011670082807541, 0.10252345353364944, -0.0811547189950943, 0.03441932052373886, -0.03334365785121918, 0.09990020096302032, 0.016947906464338303, -0.030128013342618942, -0.1113867461681366, -0.06385664641857147, -0.02044963650405407, 0.011175822466611862, -0.016040600836277008, 0.10523134469985962, -0.1281803399324417, -0.07493814080953598, -0.18558600544929504, 0.1095491573214531, -0.16450677812099457, -0.00022075435845181346, -0.06112349033355713, 0.08846374601125717, -0.17892417311668396, -0.03471647575497627, 0.09474612027406693, 0.01619725115597248, 0.0036989867221564054, 0.02664036676287651, -0.016480058431625366, -0.014876483008265495, 0.11541667580604553, -0.1433960199356079, 0.023218831047415733, -0.022905685007572174, -0.08492369204759598, 0.023155471310019493, -0.040437642484903336, 0.10688812285661697, 0.029090145602822304, -0.03084590658545494, -0.04240062087774277, 0.05197913944721222, 0.008100547827780247, -0.011834864504635334, 0.09544637799263, -0.0782647654414177, -0.15889212489128113, 0.14780676364898682, 0.10315289348363876, 0.005118127912282944, -0.009867813438177109, 0.06492530554533005, -0.03920653834939003, -0.11053988337516785, 0.030000396072864532, 0.051064275205135345, -0.1351071298122406, -0.030598333105444908, 0.02935190685093403, 0.054750584065914154, -0.03438732773065567, 0.08304706960916519, 0.019651338458061218, 0.12038331478834152, -0.07133662700653076, -0.11078037321567535, -0.06541988253593445, 0.03342299908399582, -0.01595955714583397, -0.028225278481841087, -0.07670467346906662, 
-0.08614993840456009, -0.003178482875227928, 0.05095870420336723, 0.0036467129830271006, -0.08381226658821106, -0.125712051987648, 0.01106718648225069, -0.00975339487195015, 0.06053498387336731, 0.03859074413776398, 0.06275740265846252, 0.03305478394031525, 0.005039465148001909, -0.07379502058029175, 0.009936029091477394, -0.030583588406443596, 0.013779854401946068, 0.005715631879866123, 0.07258778810501099, -0.15528523921966553, -0.08833788335323334, 0.0996818020939827, 0.007376112509518862, 0.06312351673841476, 0.15074138343334198, -0.03335177153348923, 0.04920262470841408, -0.05660126730799675, -0.0451970100402832, 0.09782487154006958, 0.04514212906360626, 0.026549052447080612, 0.03468703851103783, -0.015506280586123466, 0.0015481716254726052, 0.00635792501270771, 0.010917515493929386, 0.1768684983253479, -0.1288677453994751, -0.09097954630851746, -0.01847648061811924, -0.12113450467586517, 0.0019618268124759197, -0.11839686334133148, 0.02082560770213604, 0.14491018652915955, 0.0318218395113945, 0.033573124557733536, 0.04201004281640053, 0.0505254901945591, -0.033345744013786316, -0.04088345170021057, -0.058089643716812134, -0.11227590590715408, 0.01889769546687603, -0.01798516884446144, -0.057387229055166245, 0.36646002531051636, 0.04906671494245529, 0.011074951849877834, -0.09067104756832123, 0.2547408938407898, 0.052140600979328156, -0.05297762528061867, 0.22215582430362701, 0.015956513583660126, -0.08058944344520569, -0.057682208716869354, 0.07628446072340012, -0.004821515176445246, 0.1241375058889389, -0.08871196210384369, -0.0024515462573617697, 0.13889779150485992, -0.0473160594701767, -0.029564812779426575, -0.049133896827697754, 0.037463512271642685, -0.20312611758708954, 0.08160428702831268, -0.010803434997797012, 0.023781461641192436, 0.07601714879274368, 0.030734317377209663, -0.020880077034235, 0.02181915193796158, 0.05113256722688675, -0.08647538721561432, -0.1367366462945938, 0.02677464485168457, -0.023278769105672836, -0.07285131514072418, 
-0.025202341377735138, -0.07316514849662781, 0.024078460410237312, 0.013885353691875935, 0.008585314266383648, -0.0018735288176685572, 0.007950539700686932, 0.035669583827257156, -0.08297552913427353, -0.04182081297039986, 0.04189540445804596, -0.07438153028488159, 0.007615452632308006, 0.07370782643556595, -0.09514522552490234, -0.12154871225357056, -0.009555281139910221, 0.041316013783216476, 0.06632973253726959, 0.1165737733244896, -0.2244715839624405, -0.014814374968409538, -0.08235569298267365, 0.05350058153271675, -0.05906778946518898, 0.0456535741686821, 0.11583846062421799, -0.051329873502254486, 0.05281894654035568, 0.23346948623657227, 0.042596954852342606, 0.11430753767490387, -0.02554802969098091, 0.15023399889469147, 0.013119409792125225, -0.04120815172791481, -0.05888775363564491, -0.03575696051120758, -0.040623992681503296, 0.20814388990402222, 0.22454260289669037, -0.002697824267670512, -0.029240429401397705, 0.0004989965818822384, 0.014204747974872589, 0.04398661479353905, 0.12427888810634613, 0.037360385060310364, 0.2751617431640625, -0.035452909767627716, -0.1266997903585434, -0.08475742489099503, -0.007242195773869753, -0.19158582389354706, -0.023246152326464653, 0.06077553704380989, -0.043722301721572876, -0.06804797053337097, 0.1812848150730133, -0.21856258809566498, 0.19533327221870422, 0.04050310328602791, -0.05813436955213547, -0.023314228281378746, -0.0600515753030777, -0.0995241329073906, 0.003494860138744116, 0.030825437977910042, -0.0808931365609169, -0.07102518528699875, 0.05002395436167717, 0.06699032336473465, -0.3362065851688385, -0.19942143559455872, 0.10826073586940765, -0.07088309526443481, 0.19620797038078308, -0.013442490249872208, 0.04980630427598953, 0.015464026480913162, 0.0013908123364672065, -0.12768857181072235, 0.020765764638781548, -0.015120994299650192, 0.09619460254907608, -0.0226424727588892, -0.007716084364801645, -0.08852719515562057, 0.049859363585710526, 0.049847736954689026, 0.1731189638376236, 
0.029648080468177795, 0.04242578148841858, 0.11154050379991531, -0.08961517363786697, -0.031153954565525055, -0.10129089653491974, 0.03862762823700905, 0.1067444458603859, -0.028350094333291054, -0.003598796436563134, -0.03576602414250374, 0.0016282323049381375, 0.08090747892856598, -0.17448975145816803, 0.014913195744156837, -0.003956197760999203, -0.057327158749103546, 0.10255652666091919, 0.04627246409654617, 0.013718043453991413, 0.054752517491579056, -0.021763889119029045, -0.013717379420995712, -0.0003778533427976072, 0.06650882959365845, 0.08283596485853195, -0.024645574390888214, -0.016495928168296814, -0.03558774292469025, 0.044797416776418686, -0.07595986872911453, -0.05520269274711609, -0.1025170236825943 ]
6cc59e67524402cff2dd55d90043fda1a19db7aa
# Evol-Instruct-Python-1k - QLora Training Test This is a minor edit of the original [`mlabonne/Evol-Instruct-Python-26k`](https://huggingface.co/datasets/mlabonne/Evol-Instruct-Python-26k), which iteself was reduced to only 1000 samples for testing QLora training. The dataset was created by filtering out a few rows (instruction + output) with more than 2048 tokens, and then by keeping the 1000 longest samples. Here is the distribution of the number of tokens in each row using Llama's tokenizer: ![](https://i.imgur.com/nwJbg7S.png)
ptoro/Evol-Instruct-Python-1k-testing
[ "region:us" ]
2024-01-25T18:13:27+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "instruction", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 5465833, "num_examples": 1000}], "download_size": 2322359, "dataset_size": 5465833}}
2024-01-25T18:55:43+00:00
[]
[]
TAGS #region-us
# Evol-Instruct-Python-1k - QLora Training Test This is a minor edit of the original 'mlabonne/Evol-Instruct-Python-26k', which iteself was reduced to only 1000 samples for testing QLora training. The dataset was created by filtering out a few rows (instruction + output) with more than 2048 tokens, and then by keeping the 1000 longest samples. Here is the distribution of the number of tokens in each row using Llama's tokenizer: ![](https://i.URL
[ "# Evol-Instruct-Python-1k - QLora Training Test\n\nThis is a minor edit of the original 'mlabonne/Evol-Instruct-Python-26k', which iteself was reduced to only 1000 samples for testing QLora training.\n\nThe dataset was created by filtering out a few rows (instruction + output) with more than 2048 tokens, and then by keeping the 1000 longest samples.\n\nHere is the distribution of the number of tokens in each row using Llama's tokenizer:\n\n![](https://i.URL" ]
[ "TAGS\n#region-us \n", "# Evol-Instruct-Python-1k - QLora Training Test\n\nThis is a minor edit of the original 'mlabonne/Evol-Instruct-Python-26k', which iteself was reduced to only 1000 samples for testing QLora training.\n\nThe dataset was created by filtering out a few rows (instruction + output) with more than 2048 tokens, and then by keeping the 1000 longest samples.\n\nHere is the distribution of the number of tokens in each row using Llama's tokenizer:\n\n![](https://i.URL" ]
[ 6, 132 ]
[ "passage: TAGS\n#region-us \n# Evol-Instruct-Python-1k - QLora Training Test\n\nThis is a minor edit of the original 'mlabonne/Evol-Instruct-Python-26k', which iteself was reduced to only 1000 samples for testing QLora training.\n\nThe dataset was created by filtering out a few rows (instruction + output) with more than 2048 tokens, and then by keeping the 1000 longest samples.\n\nHere is the distribution of the number of tokens in each row using Llama's tokenizer:\n\n![](https://i.URL" ]
[ -0.06448063999414444, -0.029847081750631332, -0.0020918664522469044, 0.06626249849796295, 0.06198959797620773, 0.009834266267716885, -0.014679094776511192, 0.20022712647914886, 0.04410091042518616, -0.032992806285619736, 0.1271989941596985, 0.044288717210292816, -0.028899388387799263, 0.003188316011801362, -0.01563192531466484, -0.12132757157087326, -0.017892522737383842, 0.07048562914133072, -0.11884888261556625, 0.12880264222621918, 0.002702169120311737, -0.06724361330270767, 0.040575675666332245, -0.05420847609639168, -0.038506895303726196, 0.03306940943002701, -0.00420283991843462, -0.11865255981683731, 0.0991983637213707, -0.09518440812826157, 0.11588557064533234, -0.01541435532271862, 0.02315909042954445, -0.22673532366752625, 0.04282660409808159, 0.018162067979574203, 0.009486392140388489, 0.023604918271303177, -0.04886196181178093, 0.08335834741592407, 0.05752461403608322, 0.062048207968473434, -0.0016037434106692672, -0.04223983362317085, -0.10575228929519653, -0.039531249552965164, -0.09950314462184906, -0.07748838514089584, 0.03656364604830742, 0.08295708894729614, 0.017882946878671646, 0.29802948236465454, -0.22577469050884247, 0.05563610419631004, 0.09988769143819809, -0.2233564406633377, -0.028975648805499077, 0.06595518440008163, 0.020749956369400024, 0.0513228215277195, -0.04987569525837898, 0.014593444764614105, 0.12688010931015015, 0.0460958294570446, -0.08463282883167267, -0.07739480584859848, -0.04027701914310455, -0.017425131052732468, -0.15635116398334503, -0.10997963696718216, 0.1668229103088379, 0.02999652363359928, -0.08410505205392838, 0.10637117177248001, -0.04300425201654434, -0.24662014842033386, 0.05169396474957466, 0.02526336908340454, 0.009608474560081959, -0.049905940890312195, -0.06527163088321686, 0.029758701100945473, -0.0888289138674736, -0.16133764386177063, -0.10093314945697784, 0.12271531671285629, 0.08851641416549683, 0.049214933067560196, -0.10027037560939789, 0.07463907450437546, 0.008069893345236778, 
-0.03201641887426376, -0.10959376394748688, -0.052020229399204254, -0.027371959760785103, -0.04608432203531265, -0.004360036924481392, -0.10885745286941528, 0.1613166630268097, 0.06764443218708038, 0.0021039482671767473, 0.003344579366967082, 0.09782905131578445, 0.021366320550441742, 0.027264490723609924, 0.15311425924301147, 0.09728171676397324, 0.16925878822803497, 0.009833515621721745, 0.08669428527355194, -0.030091335996985435, -0.009315790608525276, -0.010376492515206337, -0.12855784595012665, 0.10167637467384338, 0.16777820885181427, -0.06765852868556976, 0.016492534428834915, 0.06946539878845215, -0.0210652444511652, -0.2819995880126953, -0.08374019712209702, -0.0009903189493343234, 0.003538839053362608, -0.062373168766498566, 0.015610956586897373, -0.10415580123662949, -0.06237034872174263, -0.0316196009516716, -0.10085950791835785, -0.03370915725827217, -0.00042496464448049664, -0.09959019720554352, -0.10150434821844101, -0.012103590182960033, -0.018339915201067924, 0.0330837145447731, -0.18390701711177826, -0.10419697314500809, -0.06673447042703629, -0.006870847195386887, 0.012537472881376743, 0.08610009402036667, -0.036021213978528976, -0.02819867804646492, -0.04740333557128906, 0.007396929897367954, 0.05264848843216896, -0.1038745865225792, 0.17836236953735352, 0.09138790518045425, 0.10436713695526123, -0.08864373713731766, 0.008392304182052612, -0.06260764598846436, 0.07152090221643448, 0.08650831878185272, 0.006233056541532278, -0.041890401393175125, -0.007078190799802542, 0.04074912518262863, -0.10827868431806564, 0.04804767668247223, 0.008721536956727505, 0.12129005789756775, 0.1256132423877716, -0.1851383000612259, 0.002560708671808243, 0.1250661462545395, -0.08439547568559647, -0.07916902750730515, 0.11793551594018936, -0.03513577952980995, 0.09068022668361664, 0.01785568706691265, 0.293222576379776, 0.18393197655677795, -0.03475310280919075, -0.0991518497467041, 0.042502522468566895, -0.04354357346892357, -0.1612386852502823, 
0.05285945162177086, 0.09217210859060287, -0.16617785394191742, 0.025617875158786774, -0.0933854952454567, 0.010001027025282383, -0.09784502536058426, -0.06054017320275307, -0.03400614112615585, -0.044211216270923615, 0.04583345726132393, -0.04456767812371254, 0.09180755168199539, -0.08828024566173553, 0.00799785740673542, -0.12368427962064743, 0.15212979912757874, -0.06707314401865005, 0.007638116832822561, -0.11347531527280807, 0.22043532133102417, 0.0044235908426344395, 0.003239353885874152, -0.08566323667764664, 0.05232566222548485, 0.054149869829416275, 0.10265422612428665, -0.007006745785474777, 0.1347118765115738, 0.04435186833143234, -0.060040105134248734, 0.08937190473079681, -0.042211391031742096, -0.12000247836112976, -0.07971006631851196, -0.11955776810646057, 0.10016556829214096, -0.0017079170793294907, -0.09264161437749863, -0.07596216350793839, -0.06857957690954208, -0.03721136227250099, -0.06114080548286438, -0.0872606635093689, -0.004331196658313274, 0.08393196761608124, 0.059086572378873825, 0.08045399934053421, 0.007947810925543308, -0.026925360783934593, 0.062391333281993866, -0.00552234286442399, -0.00034672714537009597, -0.10384342074394226, 0.019175810739398003, 0.08176582306623459, 0.08900446444749832, 0.05314343795180321, -0.041728317737579346, -0.0746743381023407, -0.06172902137041092, 0.05692778900265694, -0.004047587048262358, 0.026247689500451088, 0.18305881321430206, 0.019684936851263046, 0.09915149956941605, -0.08109988272190094, -0.013143926858901978, -0.005089320708066225, -0.09002272039651871, 0.01800481788814068, 0.14640070497989655, 0.08268094062805176, -0.04004599153995514, -0.04709562286734581, 0.05874716490507126, -0.0896371528506279, 0.13094742596149445, -0.10574518889188766, -0.07470549643039703, 0.010361602529883385, 0.0671299621462822, -0.003440248314291239, 0.1196816936135292, -0.02107306942343712, 0.003838212927803397, -0.02311917208135128, 0.07185440510511398, 0.019188005477190018, -0.17688848078250885, 
-0.07849444448947906, -0.028471436351537704, -0.049476951360702515, -0.22718466818332672, 0.03143898770213127, -0.024954112246632576, 0.07604997605085373, 0.008543065749108791, -0.0772787407040596, -0.02268335595726967, 0.015554743818938732, -0.007842058315873146, 0.1692596822977066, -0.0321471206843853, -0.12235952913761139, -0.11327970772981644, 0.015460840426385403, 0.04581337049603462, -0.04865604639053345, 0.06899842619895935, -0.1595086008310318, 0.05341731756925583, -0.019267458468675613, 0.10798705369234085, -0.06656394898891449, 0.09100384265184402, 0.032345544546842575, 0.02954682894051075, -0.0018141764448955655, -0.10151390731334686, 0.023645086213946342, -0.11570858210325241, -0.10015129297971725, 0.17418557405471802, -0.13516755402088165, 0.0598047636449337, 0.07588757574558258, -0.1244957372546196, 0.07732980698347092, -0.008599827066063881, 0.23515217006206512, -0.03262120485305786, -0.06778813898563385, 0.07422634214162827, 0.03038732148706913, -0.014902635477483273, -0.01032930426299572, -0.023121802136301994, -0.07761721312999725, 0.03629174083471298, 0.03450189530849457, -0.09344787895679474, -0.2551608383655548, 0.042840439826250076, 0.07965737581253052, 0.07104986160993576, 0.030247140675783157, 0.06234526261687279, -0.11741966009140015, 0.1200791448354721, 0.13351933658123016, -0.04926261678338051, -0.2725349962711334, -0.05062267556786537, 0.062240198254585266, -0.0028918192256242037, -0.036710042506456375, -0.11913455277681351, -0.054071731865406036, 0.05639878287911415, 0.10261672735214233, 0.23780739307403564, -0.06290385127067566, -0.10971710830926895, 0.09149166196584702, 0.16439288854599, 0.1053532287478447, 0.0830114334821701, -0.0023398944176733494, -0.0312889888882637, 0.02139442041516304, 0.04825006425380707, -0.0965096727013588, -0.05018802732229233, 0.026364563032984734, -0.0012969301315024495, -0.13637275993824005, 0.1212894394993782, 0.025269925594329834, 0.22900943458080292, 0.07336152344942093, -0.03902096301317215, 
-0.0026252444367855787, -0.07431329041719437, -0.10927847772836685, -0.02763441950082779, 0.134602889418602, 0.2014007270336151, -0.09539265930652618, -0.09692656248807907, -0.05091883987188339, 0.08784550428390503, -0.09001617878675461, 0.054728321731090546, -0.014301889576017857, -0.023189162835478783, -0.02596772089600563, 0.08102714270353317, -0.2882784307003021, 0.2188735008239746, -0.015392372384667397, 0.08620893210172653, -0.008249794133007526, -0.09890980273485184, 0.015443389303982258, -0.049347780644893646, 0.05591680854558945, -0.014567511156201363, -0.13253352046012878, -0.28481197357177734, -0.04274079576134682, 0.10115470737218857, 0.025396309792995453, 0.1584770530462265, 0.18609680235385895, -0.04412614926695824, 0.11149709671735764, -0.039818353950977325, 0.008993279188871384, 0.039920058101415634, 0.08926062285900116, -0.08508720248937607, 0.05368546396493912, -0.014063848182559013, -0.05104796588420868, -0.03629230335354805, -0.06039256229996681, -0.062050528824329376, -0.06924356520175934, -0.008117356337606907, 0.0018906203331425786, 0.03278049826622009, 0.10238378494977951, -0.08311739563941956, 0.02959199622273445, 0.026651453226804733, -0.0887797623872757, 0.012189583852887154, -0.029738057404756546, -0.002964310348033905, -0.007400295231491327, -0.002716071205213666, 0.05985225737094879, 0.06559363007545471, -0.04909422993659973, 0.06631465256214142, 0.10397719591856003, -0.07650331407785416, -0.0436079315841198, -0.1011398583650589, 0.11177493631839752, -0.183271586894989, -0.04673874005675316, -0.019422611221671104, -0.087922103703022, 0.20798909664154053, -0.09841152280569077, -0.015026387758553028, 0.31204408407211304, 0.19591301679611206, -0.05982605740427971, -0.10434111952781677, 0.020172199234366417, -0.06850215047597885, -0.13500690460205078, -0.014924549497663975, 0.03669584542512894, 0.05133990943431854, -0.13386228680610657, -0.1712535172700882, -0.08477500826120377, 0.1590413600206375, 0.05219465494155884, 0.19512061774730682, 
-0.3069952428340912, 0.0018369299359619617, 0.12552356719970703, 0.09216157346963882, 0.27347448468208313, -0.08272270113229752, -0.010696620680391788, 0.09190692007541656, 0.09869074821472168, 0.06022269278764725, -0.04923892766237259, 0.17477825284004211, -0.11886834353208542, 0.14923468232154846, 0.051789216697216034, -0.008998473174870014, 0.12525759637355804, -0.021902192384004593, 0.12028662115335464, -0.0045560551807284355, -0.09463860839605331, -0.0440654493868351, -0.05082143470644951, 0.028459172695875168, -0.03794443979859352, 0.04741387814283371, -0.12756413221359253, -0.0005894265486858785, -0.02537931315600872, 0.000618906517047435, 0.04496050626039505, -0.058354612439870834, -0.045742690563201904, -0.018611719831824303, -0.061476148664951324, -0.004726880230009556, 0.04338906332850456, -0.027385832741856575, 0.11473195999860764, -0.06534512341022491, -0.025747964158654213, 0.03610646352171898, -0.015206372365355492, 0.045509159564971924, 0.02682996727526188, 0.09857609868049622, -0.0946202278137207, -0.044753093272447586, 0.11370602250099182, 0.09902657568454742, 0.0013910973211750388, 0.03767392411828041, -0.037483133375644684, 0.11376411467790604, 0.04981337487697601, -0.02187235653400421, -0.007890291512012482, 0.022897787392139435, 0.016678286716341972, -0.0916246548295021, 0.04180093854665756, 0.06423363089561462, 0.005114382598549128, -0.022135699167847633, -0.03971356526017189, -0.07786589860916138, -0.050805747509002686, 0.19689133763313293, 0.08081700652837753, 0.06627444177865982, -0.0832856073975563, 0.03914076089859009, -0.05876801535487175, 0.007848243229091167, 0.0692468136548996, 0.03060171939432621, -0.006944125052541494, -0.07462973892688751, 0.03547346591949463, 0.10568582266569138, -0.07306631654500961, -0.04474175348877907, -0.14839458465576172, -0.0332985557615757, 0.05223932862281799, 0.2002042979001999, 0.08055894076824188, 0.046840522438287735, 0.011728717014193535, -0.05496823787689209, -0.10045824944972992, 
0.011985661461949348, -0.046956516802310944, 0.006817513145506382, -0.17051251232624054, 0.1061016321182251, -0.029249705374240875, 0.1866406947374344, -0.0337953045964241, -0.09926002472639084, -0.11468680202960968, 0.12282074987888336, -0.1160145178437233, 0.09356748312711716, 0.035334035754203796, 0.021993128582835197, 0.020820753648877144, 0.07257656008005142, -0.10048533231019974, 0.010974089615046978, -0.12835998833179474, 0.026426034048199654, 0.01794399507343769, -0.01918148249387741, 0.04707818105816841, 0.01703185774385929, 0.0021851176861673594, -0.024829747155308723, 0.05836203321814537, -0.010056141763925552, -0.046485092490911484, 0.12479743361473083, 0.01220555230975151, -0.003690670942887664, 0.042342595756053925, 0.049360375851392746, 0.03273852914571762, -0.07118557393550873, 0.07583516091108322, 0.003399377688765526, 0.019117258489131927, 0.09373310953378677, 0.0235437098890543, -0.10650166869163513, -0.021056436002254486, -0.038113098591566086, -0.03067985735833645, -0.046377211809158325, 0.020539600402116776, 0.050270818173885345, 0.08997593820095062, 0.17442628741264343, -0.07369919121265411, 0.08678382635116577, -0.03727306053042412, -0.026439260691404343, 0.019832145422697067, -0.07997119426727295, -0.0403648279607296, -0.022091669961810112, 0.029457824304699898, -0.006960371974855661, 0.07501550018787384, 0.023757603019475937, 0.03612817823886871, -0.03188958019018173, -0.055007923394441605, -0.04210318997502327, -0.04368653520941734, 0.31200188398361206, 0.07158466428518295, -0.004552850965410471, -0.01670391857624054, 0.026394231244921684, -0.002738021081313491, 0.04032490402460098, 0.07015229761600494, 0.09946519136428833, 0.04871693626046181, 0.03244742378592491, -0.06406459212303162, 0.02897217869758606, 0.05907941237092018, -0.062434397637844086, 0.018948424607515335, -0.002663208870217204, 0.009006555192172527, -0.011046960018575191, 0.071383535861969, -0.02209741622209549, 0.05555780231952667, -0.021799784153699875, 
-0.03786284476518631, -0.12367517501115799, -0.008284233510494232, -0.053128719329833984, -0.1250210553407669, 0.04915732517838478, -0.028245912864804268, -0.05441909283399582, 0.12116461992263794, 0.020540781319141388, -0.08561111241579056, 0.06337203085422516, -0.018197163939476013, -0.05285140872001648, 0.10597571730613708, -0.04793892800807953, -0.055589303374290466, -0.10598919540643692, 0.00175962143111974, 0.07607389986515045, -0.13367371261119843, -0.024051779881119728, 0.043543796986341476, 0.014734025113284588, 0.021552469581365585, -0.1287127137184143, -0.002203823998570442, 0.013093939982354641, 0.027560019865632057, 0.06838557124137878, 0.0691278725862503, 0.05324250087141991, -0.06991197913885117, 0.021851839497685432, 0.19503773748874664, 0.030938386917114258, -0.014624455943703651, -0.10908892750740051, 0.10098007321357727, -0.02282312512397766, 0.0063688503578305244, -0.00788906030356884, -0.03343803435564041, -0.029571523889899254, 0.22025905549526215, -0.0029529102612286806, -0.02983637899160385, -0.005598787683993578, -0.03711612522602081, 0.018316298723220825, -0.08035815507173538, 0.181285098195076, 0.09641539305448532, -0.0461353175342083, -0.06287918239831924, -0.14945337176322937, -0.1266954094171524, -0.031012220308184624, -0.0385679267346859, 0.006516140885651112, 0.09382116049528122, 0.0191936157643795, -0.16582593321800232, 0.04590250924229622, -0.09210328012704849, 0.08129597455263138, -0.051776595413684845, 0.01549759041517973, -0.09088506549596786, -0.030720176175236702, -0.021040091291069984, 0.016396384686231613, 0.06865907460451126, -0.07893218100070953, -0.001591285108588636, -0.11790697276592255, -0.02791323885321617, -0.11404278129339218, -0.20606641471385956, 0.14976435899734497, 0.1244601160287857, 0.09158281236886978, 0.0292398389428854, 0.07858149707317352, 0.04121502488851547, -0.047856878489255905, -0.07648684084415436, 0.18984481692314148, 0.05556967109441757, -0.017767569050192833, -0.021567022427916527, 
0.0030347818974405527, 0.04817672446370125, 0.08057545870542526, 0.01628398336470127, 0.07126416265964508, 0.011835209093987942, -0.06938102096319199, -0.02697148360311985, -0.13465894758701324, 0.05750032141804695, -0.018755069002509117, 0.08221447467803955, 0.15908262133598328, -0.0341673418879509, 0.08521173149347305, -0.08419516682624817, 0.062078461050987244, -0.00290085026063025, -0.12279060482978821, -0.07466322183609009, -0.04323504492640495, -0.055605050176382065, -0.021398285403847694, -0.10752646625041962, -0.26673275232315063, -0.010076506994664669, -0.036502543836832047, -0.04978850856423378, 0.01791360415518284, 0.09640439599752426, 0.11488809436559677, 0.06433490663766861, 0.01018968503922224, 0.08574144542217255, -0.023643825203180313, 0.05021388456225395, -0.09121149778366089, -0.09798651933670044 ]
243841aac0daaa75501f23a72fb87d6a5dedd748
Data generated by GPT4. *" My goal as the founder of FreeCS.org is to establish an Open-Source AI Research Lab driven by its Community. Currently, I am the sole contributor at FreeCS.org. If you share our vision, we welcome you to join our community and contribute to our mission at [freecs.org/#community](https://freecs.org/#community). "* |- [GR](https://twitter.com/gr_username) If you'd like to support this project, kindly consider making a [donation](https://freecs.org/donate).
openagi-project/OpenAGI-set-dpo-v0.1
[ "license:mit", "region:us" ]
2024-01-25T18:15:27+00:00
{"license": "mit"}
2024-01-26T16:04:12+00:00
[]
[]
TAGS #license-mit #region-us
Data generated by GPT4. *" My goal as the founder of URL is to establish an Open-Source AI Research Lab driven by its Community. Currently, I am the sole contributor at URL. If you share our vision, we welcome you to join our community and contribute to our mission at URL "* |- GR If you'd like to support this project, kindly consider making a donation.
[]
[ "TAGS\n#license-mit #region-us \n" ]
[ 11 ]
[ "passage: TAGS\n#license-mit #region-us \n" ]
[ 0.026221778243780136, -0.033018264919519424, -0.008281232789158821, -0.05295303836464882, 0.052470896393060684, 0.06768012046813965, 0.1598525494337082, 0.04655371606349945, 0.23683255910873413, -0.05407243221998215, 0.11752297729253769, 0.08923697471618652, 0.004284696187824011, -0.0009730930323712528, 0.014216204173862934, -0.17134642601013184, 0.04864625632762909, -0.02878100797533989, 0.08764812350273132, 0.032233644276857376, -0.006205103360116482, -0.03845774009823799, -0.0022142508532851934, -0.03178790956735611, -0.057939812541007996, 0.03869890421628952, 0.045729056000709534, -0.02754949778318405, 0.14189864695072174, -0.021783310920000076, 0.13335508108139038, 0.046146418899297714, -0.011738095432519913, -0.2486042082309723, 0.008575023151934147, -0.07252951711416245, -0.11333522200584412, 0.016201216727495193, 0.035761721432209015, -0.010069100186228752, 0.032174937427043915, 0.11049123108386993, -0.011680051684379578, 0.06288356333971024, -0.2015703022480011, -0.20486389100551605, -0.07508610188961029, -0.07555478066205978, 0.0589042492210865, 0.030872387811541557, 0.05628744140267372, 0.1426718831062317, -0.18022038042545319, -0.0018841808196157217, 0.04129622131586075, -0.3510737717151642, 0.09011197835206985, 0.19666501879692078, 0.06407395005226135, 0.07872317731380463, -0.04774639382958412, 0.06726468354463577, 0.07745297998189926, -0.02402484230697155, -0.10679105669260025, -0.06142130121588707, 0.040939174592494965, 0.15604156255722046, -0.03852643445134163, -0.10356393456459045, 0.2591084837913513, -0.023262828588485718, -0.04234466329216957, 0.08201269060373306, -0.02980397455394268, -0.040379155427217484, 0.04404358193278313, 0.044016025960445404, 0.036236923187971115, 0.182089164853096, 0.1260262131690979, -0.03375067934393883, -0.16269677877426147, -0.030629513785243034, -0.2528207004070282, 0.07418664544820786, -0.003647059667855501, 0.10666298121213913, -0.20037521421909332, 0.03286786004900932, -0.15483668446540833, 
-0.009493621066212654, -0.02952384203672409, -0.059835705906152725, 0.05229754373431206, -0.0237403754144907, -0.04600388556718826, 0.07238677144050598, 0.08390641957521439, 0.2046167105436325, 0.023024363443255424, 0.016697337850928307, -0.10405295342206955, 0.15052515268325806, 0.019140364602208138, 0.024860305711627007, 0.179348424077034, 0.07677878439426422, -0.04891882464289665, -0.2251969277858734, 0.027894439175724983, -0.03671982139348984, -0.1441805064678192, 0.015881337225437164, -0.1542915552854538, 0.1736440360546112, -0.04078168794512749, -0.06919530034065247, -0.08578147739171982, 0.09790384024381638, 0.07768166810274124, -0.021921472623944283, -0.023105677217245102, -0.01381723117083311, 0.03522264584898949, -0.048196230083703995, -0.11687057465314865, 0.018241960555315018, 0.11869648098945618, 0.12573401629924774, -0.1483907401561737, -0.008189842104911804, -0.017200417816638947, 0.019065292552113533, 0.09696817398071289, -0.112403005361557, 0.028845038264989853, -0.09672309458255768, -0.13033071160316467, 0.036653537303209305, 0.017736904323101044, -0.019008556380867958, 0.1340927630662918, 0.061849117279052734, 0.056560322642326355, -0.011025321669876575, -0.07250872999429703, -0.14035539329051971, -0.08679798245429993, 0.1058693379163742, -0.046787332743406296, 0.010320915840566158, -0.24556252360343933, -0.014234079979360104, -0.14995723962783813, 0.059662189334630966, -0.0037668521981686354, -0.08819212019443512, -0.07740068435668945, 0.21408265829086304, 0.0018596589798107743, 0.04301392287015915, -0.1078512966632843, 0.054903753101825714, -0.06764797121286392, 0.10065380483865738, -0.12895582616329193, -0.06441528350114822, 0.1613781899213791, -0.13135331869125366, -0.14002031087875366, 0.0033312994055449963, -0.009472889825701714, 0.12053907662630081, 0.0802001804113388, 0.44566696882247925, -0.058881040662527084, -0.16201181709766388, 0.1270403116941452, 0.17969723045825958, -0.13685379922389984, -0.25928929448127747, 0.12393020838499069, 
-0.1636963188648224, -0.16647985577583313, 0.0040023741312325, -0.006962866988033056, 0.08049977570772171, -0.03446655720472336, -0.056274134665727615, 0.042339932173490524, 0.024350708350539207, 0.029094615951180458, 0.01740112341940403, 0.07037191838026047, -0.1023021712899208, 0.08444856107234955, 0.058610700070858, -0.014111426658928394, 0.15077349543571472, 0.011494536884129047, -0.05393160134553909, 0.014761670492589474, 0.044013332575559616, -0.015627963468432426, -0.05899091437458992, -0.09661509096622467, 0.019826244562864304, -0.031149597838521004, 0.08229395002126694, 0.1699674129486084, 0.023824702948331833, -0.02797185815870762, 0.028922779485583305, 0.028606392443180084, 0.1009954959154129, 0.06960704177618027, 0.03099375218153, -0.04839283227920532, 0.04952205345034599, -0.0417071171104908, -0.11430390179157257, -0.004862460307776928, -0.011735930107533932, 0.11975742131471634, -0.08906009048223495, -0.01223952230066061, 0.05951591953635216, -0.04513183981180191, 0.0019881438929587603, 0.0428374819457531, 0.0035966038703918457, 0.1388600617647171, 0.004440935328602791, -0.04352007433772087, 0.17440910637378693, -0.05288633331656456, 0.15533447265625, 0.1715822070837021, -0.07049662619829178, 0.015605369582772255, -0.1273636519908905, 0.003230511210858822, -0.014480113983154297, 0.05292887985706329, -0.05400136485695839, -0.05201306566596031, -0.01274962443858385, 0.014292534440755844, -0.03134604170918465, 0.01711403578519821, -0.06057267636060715, -0.08167021721601486, -0.10849859565496445, 0.018649224191904068, 0.20683221518993378, -0.22544461488723755, 0.1609548032283783, 0.40251004695892334, 0.15190774202346802, 0.21155193448066711, -0.12478897720575333, -0.002471078187227249, -0.06630261242389679, 0.026115071028470993, -0.024814706295728683, 0.13782677054405212, -0.13174867630004883, -0.01413064356893301, 0.03880728408694267, 0.0454997681081295, 0.0661163181066513, -0.17195898294448853, -0.15260353684425354, -0.0034879595041275024, 
-0.020591814070940018, -0.1749730259180069, 0.04874620959162712, -0.07595308125019073, 0.02181261032819748, 0.018216799944639206, -0.10832522064447403, 0.16837291419506073, -0.033566512167453766, -0.06695768237113953, 0.052613962441682816, -0.20581911504268646, -0.07900715619325638, -0.17772749066352844, -0.18375012278556824, 0.06050071492791176, 0.05760138854384422, 0.07903145253658295, -0.05951719731092453, -0.01922747679054737, 0.061719246208667755, -0.009363299235701561, -0.13802112638950348, -0.04235544428229332, -0.06993678212165833, 0.08744155615568161, -0.09474305808544159, -0.07518411427736282, -0.07833878695964813, -0.046996138989925385, -0.020961694419384003, 0.08125963062047958, -0.1039251759648323, 0.08903530240058899, 0.1493726521730423, 0.03651920333504677, 0.05440247058868408, -0.08271230012178421, 0.12693379819393158, -0.037743739783763885, -0.09459595382213593, 0.07307634502649307, 0.004350725095719099, 0.04920351505279541, 0.24039287865161896, 0.08962162584066391, -0.10578162968158722, -0.01780811697244644, -0.0968487411737442, -0.16405464708805084, -0.2553846538066864, -0.06823288649320602, -0.08744750916957855, 0.14417944848537445, 0.014636521227657795, 0.10712126642465591, 0.14313316345214844, 0.01343101728707552, 0.10255914181470871, -0.08983208239078522, -0.018939344212412834, 0.031209396198391914, 0.2135104089975357, -0.05208220332860947, 0.00838248711079359, -0.13684824109077454, -0.0256142970174551, 0.14601100981235504, 0.13798639178276062, 0.14503207802772522, 0.31421369314193726, 0.15292863547801971, 0.13410434126853943, 0.13474710285663605, 0.12333164364099503, 0.07403261214494705, 0.03444362059235573, -0.015304201282560825, -0.06035377085208893, -0.003846159903332591, 0.02816268615424633, 0.05421729013323784, 0.06724072247743607, -0.22906480729579926, 0.041139665991067886, -0.2661744952201843, 0.03544611483812332, -0.0854712724685669, 0.1161833181977272, -0.028890252113342285, 0.11051984131336212, 0.11386284977197647, 
0.05553818494081497, -0.023278791457414627, 0.16036942601203918, 0.032686375081539154, -0.07703183591365814, 0.020292721688747406, 0.024695809930562973, 0.06633034348487854, 0.08606193959712982, 0.09550496190786362, -0.020778406411409378, -0.1831783503293991, 0.025963841006159782, 0.12212833017110825, -0.20747940242290497, 0.289523184299469, 0.013651901856064796, -0.0743619054555893, -0.01690039224922657, -0.06958060711622238, 0.008433517068624496, 0.12829731404781342, 0.10406835377216339, 0.05508929491043091, -0.2613787055015564, -0.13299626111984253, 0.046764206141233444, -0.00873907096683979, 0.11356569826602936, -0.0052223424427211285, -0.14201195538043976, -0.06640999764204025, 0.05814211815595627, -0.006591420155018568, 0.13023322820663452, -0.018290361389517784, -0.08173255622386932, -0.010230090469121933, 0.055564697831869125, -0.001312803477048874, -0.04580084979534149, 0.07523149996995926, 0.009008137509226799, 0.02259289287030697, -0.08178020268678665, 0.03887253627181053, -0.08071476966142654, -0.25375792384147644, 0.019298138096928596, -0.04987313598394394, 0.004092312417924404, -0.04684043675661087, -0.15448936820030212, -0.1129264086484909, -0.15445278584957123, 0.13100723922252655, -0.03675999864935875, 0.091565802693367, -0.0817658007144928, 0.13736046850681305, -0.08521489799022675, 0.05375019088387489, 0.00614814180880785, 0.03918716683983803, -0.017955513671040535, -0.1031481996178627, 0.09334362298250198, -0.1874227225780487, 0.023863423615694046, 0.010427716188132763, -0.056847453117370605, -0.01354232057929039, 0.03918023407459259, -0.08763083070516586, 0.21879427134990692, 0.3331502079963684, -0.011948764324188232, 0.22546616196632385, 0.35863226652145386, -0.13763751089572906, -0.23258967697620392, -0.1205512136220932, -0.3263251483440399, -0.09005610644817352, 0.17321562767028809, -0.18057219684123993, 0.04850830137729645, 0.16150830686092377, -0.10868281871080399, 0.22499866783618927, -0.22723928093910217, -0.04793389141559601, 
0.1823979914188385, -0.038322996348142624, 0.4527989625930786, -0.1144307404756546, -0.1784561723470688, -0.03637253865599632, -0.16285361349582672, 0.12426037341356277, -0.026553882285952568, 0.06700495630502701, 0.02416347898542881, -0.011372359469532967, -0.009014161303639412, -0.04529716446995735, 0.2216065675020218, 0.0522729866206646, 0.10468899458646774, -0.09159468114376068, -0.17199653387069702, 0.1907423883676529, -0.0004908236442133784, -0.003372655250132084, -0.05411549657583237, -0.04850282520055771, -0.06871756166219711, 0.033092137426137924, -0.0334564633667469, 0.06195882335305214, 0.03364093229174614, -0.11903523653745651, -0.10248823463916779, 0.034111104905605316, -0.13155671954154968, -0.054850947111845016, 0.26421889662742615, -0.02080743946135044, 0.09609334170818329, 0.04959092289209366, -0.05474294349551201, -0.13538943231105804, 0.005736751481890678, -0.07534020394086838, -0.05711410939693451, 0.06573604047298431, -0.11453206837177277, -0.024341827258467674, 0.1293732225894928, -0.029497180134058, 0.09674722701311111, 0.08061115443706512, -0.07585363835096359, 0.02032829262316227, 0.15617427229881287, -0.07247176766395569, -0.10849180817604065, 0.04999847710132599, 0.04640531167387962, 0.17256882786750793, 0.004101871978491545, 0.02018604800105095, 0.08726977556943893, 0.045959215611219406, -0.007486662827432156, 0.007311292923986912, -0.11321697384119034, -0.04241771996021271, 0.0387241393327713, -0.005273692775517702, -0.10946331918239594, 0.16008898615837097, 0.056837860494852066, 0.004653505515307188, -0.06027700752019882, 0.09720424562692642, -0.06709636747837067, -0.07046061009168625, -0.1753035932779312, 0.018511172384023666, -0.12734080851078033, -0.09874535351991653, 0.06846235692501068, -0.09371624886989594, -0.04084605351090431, 0.08152704685926437, 0.046927981078624725, 0.14401860535144806, -0.006597559433430433, -0.023080874234437943, 0.149825319647789, -0.0884878933429718, -0.2241756170988083, 0.01969664730131626, 
-0.04083063453435898, -0.07065816223621368, -0.0007070365245454013, 0.06069544702768326, -0.0663156732916832, -0.11958606541156769, -0.20477768778800964, 0.10412076860666275, -0.12043121457099915, -0.03954985365271568, -0.1041841059923172, -0.053260523825883865, 0.07891252636909485, -0.02613759972155094, -0.04122013971209526, -0.047595683485269547, -0.16630595922470093, 0.054254453629255295, 0.07140932232141495, 0.11125344783067703, -0.0759999230504036, -0.018354382365942, 0.1398727148771286, 0.048581548035144806, 0.08479110151529312, 0.07578440010547638, 0.026255371049046516, 0.16728560626506805, -0.1708206981420517, -0.0542997270822525, 0.1068294569849968, -0.026716172695159912, 0.01994573324918747, 0.10631280392408371, -0.04839588701725006, 0.07042654603719711, -0.05095988139510155, 0.05859163776040077, -0.15704534947872162, -0.13073866069316864, -0.04184387996792793, 0.023728877305984497, -0.2260182797908783, 0.015071595087647438, -0.1769561767578125, 0.19692228734493256, -0.024228032678365707, 0.11490963399410248, 0.08052190393209457, 0.02052290178835392, 0.03539382666349411, -0.006019921973347664, 0.00946811307221651, -0.10524865239858627, -0.05784677714109421, -0.07560300827026367, -0.1168874129652977, -0.009665017947554588, 0.36614301800727844, 0.02430291846394539, -0.19682736694812775, 0.051222387701272964, 0.18285293877124786, 0.023639049381017685, -0.0073763905093073845, 0.26180747151374817, 0.08150359988212585, -0.023175053298473358, -0.1782374382019043, 0.0396091528236866, -0.08699734508991241, -0.15269799530506134, 0.11385007947683334, 0.09347525984048843, 0.05813581123948097, 0.022930078208446503, 0.10404518246650696, -0.035940010100603104, -0.05509711429476738, -0.13301853835582733, 0.13368983566761017, -0.001790675800293684, 0.0193882267922163, 0.0897885113954544, 0.19249756634235382, -0.045275162905454636, 0.05437124893069267, -0.07336640357971191, -0.001598604372702539, -0.15740543603897095, -0.13358698785305023, 0.06194563955068588, 
-0.08269550651311874, 0.06342913210391998, 0.050261519849300385, 0.04341990500688553, 0.31786394119262695, 0.039095040410757065, -0.046439893543720245, 0.003166865324601531, -0.14845187962055206, -0.08075450360774994, -0.06024569645524025, -0.03110554814338684, 0.028620192781090736, -0.13928957283496857, -0.09898591786623001, -0.06917677819728851, -0.130235955119133, -0.06539803743362427, 0.025270747020840645, 0.014251931570470333, -0.053083837032318115, -0.17625881731510162, -0.04808593541383743, -0.06644169986248016, 0.10105955600738525, -0.08462738990783691, 0.1516820639371872, 0.0022449472453445196, 0.030281953513622284, 0.07627002149820328, 0.09585131704807281, 0.018900424242019653, -0.06975197046995163, 0.05599058046936989, 0.12436293810606003, 0.01323844213038683, 0.1259988248348236, -0.06034265458583832, -0.019420607015490532, -0.014145253226161003, 0.14038437604904175, 0.304447740316391, -0.01856905221939087, -0.013814439997076988, -0.022110093384981155, 0.021388787776231766, 0.10893569141626358, 0.19800719618797302, -0.03437356278300285, 0.2551359534263611, -0.058974795043468475, 0.0756678432226181, -0.013180435635149479, -0.005362013820558786, -0.053146667778491974, 0.06074550002813339, 0.06268858164548874, -0.06877048313617706, -0.10191375762224197, 0.15178529918193817, -0.14985080063343048, 0.13306055963039398, 0.14678068459033966, -0.06057753041386604, 0.03797250986099243, 0.0007459368789568543, 0.19896264374256134, -0.03570213168859482, 0.0984780564904213, -0.10653308779001236, -0.10261140763759613, -0.14764924347400665, 0.037690844386816025, -0.36797797679901123, -0.1756322830915451, 0.11731542646884918, 0.14115898311138153, 0.1759258657693863, -0.012341637164354324, 0.056479312479496, 0.0033020609989762306, 0.08296097069978714, -0.04232487455010414, 0.1519634872674942, 0.0612073615193367, -0.017103128135204315, -0.15296664834022522, -0.20328094065189362, -0.0012039330322295427, -0.058561209589242935, 0.055583830922842026, -0.02269243635237217, 
0.025347469374537468, 0.07746459543704987, -0.06768939644098282, -0.029180381447076797, -0.02352982573211193, -0.13262848556041718, 0.052229251712560654, -0.04354005306959152, 0.0320255309343338, -0.03958037868142128, -0.022394726052880287, -0.039987675845623016, 0.10721533745527267, -0.22402705252170563, -0.08517231047153473, 0.1422796994447708, -0.03421911224722862, 0.1542559564113617, -0.02848726324737072, -0.12159585952758789, -0.024955326691269875, -0.06977712363004684, 0.10887379199266434, -0.1419300138950348, 0.038592495024204254, 0.13747453689575195, 0.008710617199540138, 0.031119761988520622, -0.2533661723136902, 0.050644006580114365, -0.03556957095861435, -0.016733208671212196, -0.057031940668821335 ]
bab96a64f3c87d895aa9c2ca2e3dd7d7ac558994
# 🚀 Load Dataset ```python from datasets import load_dataset dataset = load_dataset("shuyuej/prompt_consistency_training") dataset = dataset["train"] print(dataset) ```
shuyuej/prompt_consistency_training
[ "license:apache-2.0", "region:us" ]
2024-01-25T18:21:19+00:00
{"license": "apache-2.0"}
2024-01-25T19:33:22+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
# Load Dataset
[ "# Load Dataset" ]
[ "TAGS\n#license-apache-2.0 #region-us \n", "# Load Dataset" ]
[ 14, 5 ]
[ "passage: TAGS\n#license-apache-2.0 #region-us \n# Load Dataset" ]
[ -0.07001654803752899, 0.1904653012752533, -0.004998877178877592, 0.00907934457063675, -0.0028538380283862352, 0.054394908249378204, 0.16303427517414093, 0.12618236243724823, 0.1763383150100708, -0.06096614897251129, 0.09710494428873062, 0.04643046483397484, 0.020282577723264694, 0.11439700424671173, 0.015474379062652588, -0.10245008021593094, 0.11215199530124664, -0.03296778351068497, -0.15413860976696014, -0.020041679963469505, 0.06634549796581268, 0.008282235823571682, 0.018562356010079384, -0.06968339532613754, -0.006373110227286816, 0.03640283644199371, 0.0020335251465439796, 0.016162084415555, 0.02330676279962063, -0.038277264684438705, 0.005731794983148575, 0.029110433533787727, 0.03391251340508461, -0.20263391733169556, 0.002159419935196638, 0.011697918176651001, -0.072933629155159, 0.021473249420523643, 0.015562492422759533, 0.013006992638111115, -0.014430290088057518, 0.008538242429494858, -0.04590527340769768, 0.004893551114946604, -0.029969552531838417, -0.23031283915042877, -0.13999857008457184, 0.03098377026617527, 0.061859868466854095, 0.04261074587702751, 0.08520697802305222, 0.112308070063591, -0.1684703677892685, -0.023576728999614716, 0.05644429847598076, -0.2658948600292206, 0.03770916908979416, 0.1925373077392578, -0.027282025665044785, 0.05708051100373268, -0.032640136778354645, 0.014179641380906105, 0.09702048450708389, -0.03234678879380226, -0.05952118709683418, -0.019281448796391487, -0.14307193458080292, 0.12061256915330887, 0.007947882637381554, -0.07937075942754745, 0.4276890456676483, 0.08947485685348511, 0.03446684032678604, 0.04088602960109711, -0.04162471741437912, 0.08802156895399094, -0.00882687047123909, 0.11228219419717789, 0.10370917618274689, 0.17537449300289154, 0.09373819082975388, -0.05837281793355942, -0.1387629508972168, -0.0764801874756813, -0.14893755316734314, -0.0881175547838211, 0.002753246808424592, 0.14657826721668243, -0.11670559644699097, -0.011121216230094433, -0.013173341751098633, -0.08000656962394714, 
-0.016438452526926994, -0.06224498152732849, 0.05970108509063721, 0.07276752591133118, -0.06468930095434189, 0.08973073959350586, 0.19600972533226013, 0.24304203689098358, 0.10773353278636932, -0.0031716858502477407, -0.09783545881509781, 0.1311764270067215, 0.02450001984834671, 0.0035947742871940136, 0.031216908246278763, -0.03225255012512207, 0.1274290233850479, -0.11814776808023453, 0.10293827950954437, -0.03236187621951103, -0.12062004208564758, -0.015531999059021473, -0.12012672424316406, 0.0924612358212471, 0.12156467139720917, -0.07433512806892395, -0.02871553599834442, -0.0001638674148125574, 0.1914612352848053, -0.07343824207782745, 0.011609512381255627, 0.037060488015413284, -0.04014584422111511, 0.019367489963769913, 0.07047323882579803, 0.01618986763060093, 0.051165465265512466, -0.07353158295154572, -0.07518532872200012, -0.04754794016480446, 0.006732400972396135, 0.08390267193317413, 0.16801878809928894, -0.0787782073020935, 0.02774466574192047, -0.07314363867044449, -0.2640254497528076, 0.015318970195949078, 0.10718841850757599, 0.01326839905232191, -0.04644060134887695, 0.0937519520521164, 0.04369577765464783, 0.0219112578779459, -0.07621479034423828, 0.021636027842760086, -0.10924455523490906, 0.01635110005736351, -0.18735843896865845, -0.00034857707214541733, -0.21728886663913727, 0.029500821605324745, -0.14498181641101837, 0.015984103083610535, 0.008336689323186874, -0.036203473806381226, -0.16693279147148132, 0.18584993481636047, -0.12228277325630188, 0.07665608078241348, -0.022888079285621643, -0.027417439967393875, -0.05536381155252457, 0.10486848652362823, -0.1901201605796814, 0.013646816834807396, 0.11956170201301575, -0.1315540224313736, -0.1981583535671234, -0.008748158812522888, 0.032004132866859436, 0.03905763477087021, 0.019999176263809204, 0.2750741243362427, 0.03931259736418724, -0.039619818329811096, 0.05513544753193855, 0.21230784058570862, -0.037666887044906616, -0.30606216192245483, 0.1337161511182785, -0.14175526797771454, 
-0.09446083754301071, 0.035942576825618744, -0.05984149128198624, 0.10572918504476547, 0.05563924461603165, -0.09842493385076523, -0.05832784250378609, -0.11846277117729187, -0.05080516263842583, -0.007119585759937763, 0.011414838954806328, -0.003772859927266836, 0.06987990438938141, -0.03394223004579544, 0.12409792840480804, 0.0430963933467865, 0.06995843350887299, -0.0019742429722100496, 0.010701647028326988, -0.032948900014162064, 0.01685371808707714, -0.05274543538689613, -0.11824917048215866, 0.03222540020942688, -0.07840131968259811, 0.010248200036585331, 0.0754818469285965, 0.08918923884630203, -0.10875682532787323, 0.004986066371202469, 0.054788678884506226, 0.05776919424533844, 0.09128472954034805, 0.004639789462089539, -0.1553730070590973, 0.03658977895975113, -0.007291417568922043, 0.11663542687892914, 0.013290156610310078, -0.015077928081154823, 0.02553686872124672, 0.030704280361533165, -0.01582513377070427, 0.021987546235322952, 0.012808484956622124, -0.15357114374637604, 0.013035740703344345, -0.029535915702581406, 0.0515626035630703, 0.07516250759363174, -0.10649967193603516, 0.17749257385730743, 0.05246195197105408, 0.0937701091170311, 0.1691731959581375, -0.0031942089553922415, 0.15303511917591095, -0.07711337506771088, -0.03034058026969433, -0.08476773649454117, 0.019114429131150246, -0.03520386293530464, -0.19912157952785492, 0.022483263164758682, -0.006603246089071035, -0.03966274857521057, 0.011731458827853203, -0.05206333473324776, -0.05032000690698624, -0.01633274368941784, -0.012912747450172901, 0.22682836651802063, -0.0740542858839035, 0.16127049922943115, 0.3929736912250519, 0.009790927171707153, 0.03198212757706642, -0.15585008263587952, -0.08811907470226288, 0.025037283077836037, -0.02013486996293068, -0.07733090966939926, 0.1421229988336563, -0.08407855778932571, 0.07925056666135788, 0.11559281498193741, 0.07515023648738861, 0.048295337706804276, -0.09319842606782913, -0.08565586805343628, -0.016870850697159767, -0.06494379788637161, 
-0.0757361501455307, 0.006179455202072859, -0.09064626693725586, 0.038997307419776917, -0.009553880430758, -0.08711880445480347, 0.1416091024875641, -0.06720657646656036, -0.07858653366565704, 0.08501026779413223, -0.1773792803287506, -0.017761990427970886, -0.04987482354044914, -0.05524556711316109, -0.05987956374883652, -0.011223694309592247, 0.03956194967031479, -0.07345953583717346, -0.06481890380382538, -0.0044582197442650795, -0.11862372606992722, 0.05133015662431717, -0.002835777821019292, -0.001831702538765967, 0.07515180855989456, 0.020108293741941452, -0.13118132948875427, -0.020894730463624, 0.014332571998238564, -0.011530141346156597, 0.020547814667224884, -0.12734408676624298, 0.08069302886724472, 0.11161760985851288, 0.07985270023345947, 0.040471069514751434, -0.002052333438768983, 0.08670412749052048, -0.0023793831933289766, 0.0037481726612895727, 0.16268162429332733, 0.012608258984982967, 0.018607959151268005, 0.11706527322530746, 0.05305314064025879, -0.048632312566041946, 0.0070783342234790325, 0.014489368535578251, -0.11434327811002731, -0.3088737428188324, -0.126278817653656, -0.0808558538556099, 0.07748565077781677, 0.08287206292152405, 0.14348120987415314, 0.05805513635277748, 0.07937482744455338, -0.020483603700995445, 0.006435450632125139, 0.013887143693864346, -0.036056581884622574, 0.031847286969423294, -0.035910844802856445, -0.01373085007071495, -0.17227716743946075, 0.07147528976202011, 0.19122646749019623, 0.1603180468082428, 0.19476060569286346, 0.19143876433372498, 0.13832567632198334, 0.09698405116796494, 0.18953464925289154, -0.040424197912216187, 0.12604868412017822, 0.058183349668979645, 0.027232512831687927, -0.01953265815973282, -0.0491039864718914, -0.0196559876203537, 0.10182230174541473, 0.03333480656147003, -0.174533411860466, 0.03443571925163269, -0.16347607970237732, 0.07128030061721802, 0.12810580432415009, 0.09226731956005096, -0.07216334342956543, 0.11737333983182907, 0.12907880544662476, 0.11842317879199982, 
0.0372268371284008, 0.13275204598903656, -0.0738038718700409, -0.04381672292947769, 0.11593903601169586, 0.027494866400957108, 0.10757297277450562, 0.006952364929020405, -0.0514223575592041, -0.05423459783196449, -0.1831110715866089, 0.07742337882518768, 0.1742519736289978, -0.11897630244493484, 0.1527315080165863, 0.007360770832747221, -0.07842399924993515, -0.10153786838054657, -0.035853754729032516, 0.07475770264863968, 0.13980795443058014, 0.10237900912761688, 0.09461832791566849, -0.16316671669483185, 0.1071644052863121, -0.18991471827030182, 0.03761889785528183, -0.06512397527694702, -0.02938513644039631, -0.13786184787750244, -0.03136984631419182, 0.018966924399137497, 0.04204443097114563, 0.14526686072349548, -0.08443856239318848, -0.0934140607714653, -0.03135392814874649, 0.16512109339237213, -0.07707548886537552, -0.09012707322835922, 0.04150933399796486, -0.01470975112169981, 0.13258203864097595, -0.0013804734917357564, -0.03641519695520401, -0.0624312199652195, -0.13648180663585663, 0.12659449875354767, -0.006370837800204754, -0.01564682088792324, -0.04408372566103935, -0.053515125066041946, -0.07499849051237106, -0.22749395668506622, 0.09926195442676544, -0.11815635859966278, 0.029708122834563255, -0.05135143920779228, 0.08055438846349716, -0.04865441098809242, 0.011574150063097477, 0.013341099955141544, 0.00196447572670877, -0.04447980970144272, -0.12119755893945694, 0.07120175659656525, 0.05407170578837395, 0.0179436057806015, 0.07252787798643112, -0.05706556513905525, 0.04152284935116768, 0.139897882938385, -0.08164766430854797, 0.1449587196111679, 0.16808827221393585, -0.08507421612739563, 0.16733917593955994, 0.3047759532928467, -0.09884221851825714, -0.27654603123664856, -0.13852934539318085, -0.22699107229709625, -0.1497262865304947, 0.05740240216255188, -0.16743381321430206, 0.1785702109336853, 0.15324127674102783, -0.17003217339515686, 0.15567822754383087, -0.20013009011745453, -0.04761470854282379, 0.22139687836170197, -0.07744777947664261, 
0.3270156681537628, -0.15036125481128693, -0.06321949511766434, -0.1357784867286682, -0.14006933569908142, 0.1640874743461609, -0.2520224452018738, 0.009696963243186474, 0.03358783572912216, -0.07571630924940109, -0.053476277738809586, -0.06294088065624237, 0.20365726947784424, 0.08864487707614899, 0.048724330961704254, -0.07783883810043335, 0.0538349449634552, 0.17002438008785248, -0.08258774876594543, 0.11427507549524307, -0.1551506072282791, -0.007821562699973583, -0.11334814876317978, 0.049372438341379166, -0.007536802440881729, 0.07303659617900848, 0.018435997888445854, -0.055110782384872437, -0.09097205847501755, -0.012564500793814659, -0.0010017354506999254, 0.017773348838090897, 0.2545160949230194, 0.12870335578918457, -0.09068934619426727, 0.12058752775192261, -0.08183950185775757, -0.10650019347667694, -0.09857156872749329, -0.09697787463665009, -0.09271606057882309, 0.05486408993601799, -0.29307621717453003, 0.06150501221418381, 0.04435229301452637, -0.056741055101156235, 0.021166298538446426, 0.04856487363576889, -0.07821919023990631, -0.047246869653463364, 0.10806192457675934, -0.05617867782711983, 0.0060365828685462475, 0.06188586354255676, 0.06320629268884659, 0.01908440701663494, 0.015047809109091759, 0.07980872690677643, 0.02779097482562065, 0.03299710527062416, 0.02156687341630459, 0.1232525184750557, -0.1096741333603859, 0.025780048221349716, 0.08234716206789017, -0.03277469798922539, -0.1291341930627823, 0.27868539094924927, 0.0328671969473362, -0.07490034401416779, -0.014853513799607754, 0.02017960511147976, -0.08733731508255005, -0.11030066758394241, 0.03357265517115593, 0.05562034994363785, -0.0790076032280922, -0.16002187132835388, 0.04434940963983536, -0.04750889167189598, -0.011495170183479786, -0.09123263508081436, 0.12545834481716156, 0.11386078596115112, 0.07958052307367325, -0.08150102943181992, 0.09422098100185394, -0.015888547524809837, -0.11720315366983414, -0.00965417642146349, -0.04330809786915779, -0.27147001028060913, 
0.0114149060100317, 0.07767054438591003, -0.01839832030236721, -0.024710092693567276, -0.05538126826286316, 0.068058542907238, -0.18358135223388672, 0.023774465546011925, -0.05291692540049553, 0.013881206512451172, 0.0013482654467225075, -0.06351982802152634, -0.013056534342467785, 0.018374785780906677, -0.11782747507095337, -0.05090685561299324, -0.03275580331683159, 0.07783249765634537, -0.16030682623386383, -0.0877394899725914, 0.11273340880870819, 0.03247608616948128, 0.1116083636879921, 0.11100348085165024, 0.0029238115530461073, 0.09652310609817505, -0.08867620676755905, -0.10137758404016495, 0.028416428714990616, 0.05850553140044212, -0.004717225208878517, 0.0338771790266037, -0.08107476681470871, 0.0965980663895607, -0.08487499505281448, 0.0017951868940144777, -0.03430997580289841, -0.11891184002161026, -0.1062634065747261, -0.07948566228151321, -0.1201784610748291, 0.039140596985816956, -0.16233326494693756, 0.17395327985286713, 0.09610553085803986, 0.10544848442077637, 0.07115017622709274, -0.017539754509925842, -0.051834944635629654, 0.0012812841450795531, -0.03794896602630615, -0.046503376215696335, -0.12840472161769867, 0.03780418261885643, -0.07327639311552048, -0.09395363926887512, 0.3460042476654053, -0.03070560283958912, -0.12798358500003815, 0.048227906227111816, 0.14436577260494232, 0.05600818246603012, -0.00208035996183753, 0.2746630012989044, 0.046842265874147415, 0.03356502205133438, -0.05823233723640442, 0.006521868985146284, 0.05556127429008484, -0.07734274864196777, 0.005020815413445234, 0.05489637702703476, 0.12683485448360443, 0.04395321011543274, 0.04137979447841644, -0.1202191486954689, -0.02280261740088463, 0.010061034001410007, 0.08806835114955902, 0.06402159482240677, 0.03048405982553959, 0.09209851920604706, 0.11836949735879898, -0.03152010589838028, -0.016634559258818626, -0.03362197056412697, 0.015894491225481033, -0.16492749750614166, -0.13232536613941193, -0.02111920900642872, -0.1728866845369339, -0.0008740238845348358, 
-0.003787984373047948, -0.04413442686200142, 0.26616278290748596, 0.05202426016330719, -0.014622442424297333, -0.0725850760936737, -0.14192940294742584, 0.008088779635727406, -0.0745602697134018, -0.025967802852392197, -0.0486544668674469, 0.029924146831035614, -0.07588427513837814, 0.02722756192088127, -0.05634620040655136, -0.06126495078206062, 0.059751976281404495, 0.09817507117986679, 0.1141197606921196, -0.06400559097528458, -0.037795860320329666, -0.12780417501926422, 0.005348340142518282, -0.031182952225208282, 0.19150452315807343, 0.0732182040810585, 0.07031357288360596, 0.11209698021411896, 0.07329583913087845, -0.047418445348739624, -0.11615771055221558, -0.050271112471818924, -0.035710208117961884, -0.03748736158013344, 0.04092748463153839, -0.03058774583041668, -0.03980829566717148, -0.04853656142950058, 0.19831372797489166, 0.26129576563835144, -0.08521967381238937, -0.0005671381950378418, -0.0002930442860815674, 0.003739734645932913, 0.00204873806796968, 0.1472928375005722, 0.05853525921702385, 0.11207357794046402, -0.051810771226882935, -0.0021520762238651514, -0.04380796104669571, -0.02876044623553753, -0.16162092983722687, 0.07960914820432663, -0.03894373029470444, -0.10289120674133301, -0.030670279636979103, 0.14215749502182007, -0.06743727624416351, 0.07221474498510361, 0.06153428182005882, -0.055546604096889496, -0.0187344029545784, -0.01849968172609806, 0.15872111916542053, 0.0667259618639946, 0.006397204007953405, -0.11801804602146149, 0.009210037998855114, 0.04581350460648537, -0.041233129799366, -0.30970680713653564, -0.18251237273216248, 0.08559861034154892, 0.09826260805130005, 0.2934248149394989, 0.01480457279831171, 0.1105048730969429, 0.012399069964885712, 0.023192690685391426, -0.17247705161571503, 0.06305760145187378, 0.02640485018491745, -0.04569758474826813, -0.06197669357061386, -0.21027415990829468, -0.15482737123966217, -0.012660115025937557, 0.07564311474561691, 0.07467310130596161, -0.013488510623574257, 0.1677602231502533, 
-0.04988284036517143, -0.08345402032136917, -0.02381213940680027, -0.11725359410047531, 0.09443474560976028, -0.06419520080089569, -0.07034330070018768, -0.07380110025405884, -0.04098692163825035, 0.006217553745955229, 0.03104177676141262, -0.26473453640937805, -0.035998400300741196, 0.14492513239383698, -0.0039027442689985037, 0.11870933324098587, 0.06679368764162064, 0.06207719072699547, 0.021644730120897293, -0.05295679718255997, -0.00618883827701211, -0.061871565878391266, 0.044328924268484116, 0.07077302038669586, -0.0393492691218853, 0.0051862443797290325, -0.09827139228582382, 0.04674050584435463, -0.040249425917863846, -0.05528632551431656, -0.1310999095439911 ]
0e313729ab7dc3b1da0a56c3263ad02d8d35dccf
# Dataset Card for Evaluation run of CultriX/OmniTrixAI <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [CultriX/OmniTrixAI](https://huggingface.co/CultriX/OmniTrixAI) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_CultriX__OmniTrixAI", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T18:33:17.623853](https://huggingface.co/datasets/open-llm-leaderboard/details_CultriX__OmniTrixAI/blob/main/results_2024-01-25T18-33-17.623853.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6570855389090889, "acc_stderr": 0.032026790322626036, "acc_norm": 0.6565386428162296, "acc_norm_stderr": 0.03269491131174325, "mc1": 0.5691554467564259, "mc1_stderr": 0.01733527247533237, "mc2": 0.7012232532865607, "mc2_stderr": 0.01495517671484403 }, "harness|arc:challenge|25": { "acc": 0.7056313993174061, "acc_stderr": 0.01331852846053942, "acc_norm": 0.7295221843003413, "acc_norm_stderr": 0.012980954547659556 }, "harness|hellaswag|10": { "acc": 0.7145986855208126, "acc_stderr": 0.004506824094333298, "acc_norm": 0.8851822346146186, "acc_norm_stderr": 0.003181503506054324 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.674074074074074, "acc_stderr": 0.040491220417025055, "acc_norm": 0.674074074074074, "acc_norm_stderr": 0.040491220417025055 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7105263157894737, "acc_stderr": 0.03690677986137283, "acc_norm": 0.7105263157894737, "acc_norm_stderr": 0.03690677986137283 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7094339622641509, "acc_stderr": 0.02794321998933714, "acc_norm": 0.7094339622641509, "acc_norm_stderr": 0.02794321998933714 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 
0.05009082659620333 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4411764705882353, "acc_stderr": 0.049406356306056595, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.049406356306056595 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816506, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5829787234042553, "acc_stderr": 0.03223276266711712, "acc_norm": 0.5829787234042553, "acc_norm_stderr": 0.03223276266711712 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5087719298245614, "acc_stderr": 0.04702880432049615, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5586206896551724, "acc_stderr": 0.04137931034482757, "acc_norm": 0.5586206896551724, "acc_norm_stderr": 0.04137931034482757 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41005291005291006, "acc_stderr": 0.02533120243894443, "acc_norm": 0.41005291005291006, "acc_norm_stderr": 0.02533120243894443 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4523809523809524, "acc_stderr": 0.044518079590553275, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.044518079590553275 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7967741935483871, "acc_stderr": 0.02289168798455496, "acc_norm": 0.7967741935483871, "acc_norm_stderr": 0.02289168798455496 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4876847290640394, "acc_stderr": 0.035169204442208966, "acc_norm": 0.4876847290640394, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.0328766675860349, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.0328766675860349 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.797979797979798, "acc_stderr": 0.02860620428922987, "acc_norm": 0.797979797979798, "acc_norm_stderr": 0.02860620428922987 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.021500249576033456, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.021500249576033456 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6794871794871795, "acc_stderr": 0.02366129639396428, "acc_norm": 0.6794871794871795, "acc_norm_stderr": 0.02366129639396428 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35555555555555557, "acc_stderr": 0.029185714949857416, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.029185714949857416 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6848739495798319, "acc_stderr": 0.030176808288974337, "acc_norm": 0.6848739495798319, "acc_norm_stderr": 0.030176808288974337 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3576158940397351, "acc_stderr": 0.03913453431177258, "acc_norm": 0.3576158940397351, "acc_norm_stderr": 0.03913453431177258 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8422018348623853, "acc_stderr": 0.01563002297009244, "acc_norm": 0.8422018348623853, "acc_norm_stderr": 0.01563002297009244 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5231481481481481, "acc_stderr": 
0.03406315360711507, "acc_norm": 0.5231481481481481, "acc_norm_stderr": 0.03406315360711507 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8431372549019608, "acc_stderr": 0.02552472232455335, "acc_norm": 0.8431372549019608, "acc_norm_stderr": 0.02552472232455335 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7890295358649789, "acc_stderr": 0.026558372502661916, "acc_norm": 0.7890295358649789, "acc_norm_stderr": 0.026558372502661916 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6816143497757847, "acc_stderr": 0.03126580522513713, "acc_norm": 0.6816143497757847, "acc_norm_stderr": 0.03126580522513713 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7938931297709924, "acc_stderr": 0.03547771004159465, "acc_norm": 0.7938931297709924, "acc_norm_stderr": 0.03547771004159465 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098824, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098824 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7592592592592593, "acc_stderr": 0.04133119440243838, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.04133119440243838 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.0335195387952127, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.0335195387952127 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.040580420156460344, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8760683760683761, "acc_stderr": 0.021586494001281365, "acc_norm": 0.8760683760683761, "acc_norm_stderr": 0.021586494001281365 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 
0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8314176245210728, "acc_stderr": 0.013387895731543604, "acc_norm": 0.8314176245210728, "acc_norm_stderr": 0.013387895731543604 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7485549132947977, "acc_stderr": 0.02335736578587403, "acc_norm": 0.7485549132947977, "acc_norm_stderr": 0.02335736578587403 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4346368715083799, "acc_stderr": 0.016578997435496717, "acc_norm": 0.4346368715083799, "acc_norm_stderr": 0.016578997435496717 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7254901960784313, "acc_stderr": 0.025553169991826524, "acc_norm": 0.7254901960784313, "acc_norm_stderr": 0.025553169991826524 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7170418006430869, "acc_stderr": 0.025583062489984813, "acc_norm": 0.7170418006430869, "acc_norm_stderr": 0.025583062489984813 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7469135802469136, "acc_stderr": 0.024191808600712995, "acc_norm": 0.7469135802469136, "acc_norm_stderr": 0.024191808600712995 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.49645390070921985, "acc_stderr": 0.02982674915328092, "acc_norm": 0.49645390070921985, "acc_norm_stderr": 0.02982674915328092 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.47327249022164275, "acc_stderr": 0.012751977967676012, "acc_norm": 0.47327249022164275, "acc_norm_stderr": 0.012751977967676012 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6764705882352942, "acc_stderr": 0.02841820861940676, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.02841820861940676 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6764705882352942, "acc_stderr": 0.018926082916083383, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.018926082916083383 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, 
"acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7346938775510204, "acc_stderr": 0.028263889943784593, "acc_norm": 0.7346938775510204, "acc_norm_stderr": 0.028263889943784593 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8258706467661692, "acc_stderr": 0.026814951200421603, "acc_norm": 0.8258706467661692, "acc_norm_stderr": 0.026814951200421603 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.033799766898963086, "acc_norm": 0.87, "acc_norm_stderr": 0.033799766898963086 }, "harness|hendrycksTest-virology|5": { "acc": 0.5662650602409639, "acc_stderr": 0.03858158940685516, "acc_norm": 0.5662650602409639, "acc_norm_stderr": 0.03858158940685516 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8421052631578947, "acc_stderr": 0.027966785859160893, "acc_norm": 0.8421052631578947, "acc_norm_stderr": 0.027966785859160893 }, "harness|truthfulqa:mc|0": { "mc1": 0.5691554467564259, "mc1_stderr": 0.01733527247533237, "mc2": 0.7012232532865607, "mc2_stderr": 0.01495517671484403 }, "harness|winogrande|5": { "acc": 0.8358326756116812, "acc_stderr": 0.010410849775222782 }, "harness|gsm8k|5": { "acc": 0.7050796057619408, "acc_stderr": 0.012560698010954772 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. 
--> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_CultriX__OmniTrixAI
[ "region:us" ]
2024-01-25T18:35:36+00:00
{"pretty_name": "Evaluation run of CultriX/OmniTrixAI", "dataset_summary": "Dataset automatically created during the evaluation run of model [CultriX/OmniTrixAI](https://huggingface.co/CultriX/OmniTrixAI) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_CultriX__OmniTrixAI\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T18:33:17.623853](https://huggingface.co/datasets/open-llm-leaderboard/details_CultriX__OmniTrixAI/blob/main/results_2024-01-25T18-33-17.623853.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6570855389090889,\n \"acc_stderr\": 0.032026790322626036,\n \"acc_norm\": 0.6565386428162296,\n \"acc_norm_stderr\": 0.03269491131174325,\n \"mc1\": 0.5691554467564259,\n \"mc1_stderr\": 0.01733527247533237,\n \"mc2\": 0.7012232532865607,\n \"mc2_stderr\": 0.01495517671484403\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7056313993174061,\n \"acc_stderr\": 0.01331852846053942,\n \"acc_norm\": 0.7295221843003413,\n \"acc_norm_stderr\": 0.012980954547659556\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7145986855208126,\n \"acc_stderr\": 0.004506824094333298,\n \"acc_norm\": 0.8851822346146186,\n \"acc_norm_stderr\": 0.003181503506054324\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.674074074074074,\n \"acc_stderr\": 0.040491220417025055,\n \"acc_norm\": 0.674074074074074,\n \"acc_norm_stderr\": 0.040491220417025055\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7105263157894737,\n \"acc_stderr\": 0.03690677986137283,\n \"acc_norm\": 0.7105263157894737,\n \"acc_norm_stderr\": 0.03690677986137283\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7094339622641509,\n \"acc_stderr\": 0.02794321998933714,\n \"acc_norm\": 0.7094339622641509,\n \"acc_norm_stderr\": 0.02794321998933714\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n 
\"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.049406356306056595,\n \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.049406356306056595\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5829787234042553,\n \"acc_stderr\": 0.03223276266711712,\n \"acc_norm\": 0.5829787234042553,\n \"acc_norm_stderr\": 0.03223276266711712\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.5087719298245614,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482757,\n \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482757\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41005291005291006,\n \"acc_stderr\": 0.02533120243894443,\n \"acc_norm\": 0.41005291005291006,\n \"acc_norm_stderr\": 0.02533120243894443\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4523809523809524,\n \"acc_stderr\": 
0.044518079590553275,\n \"acc_norm\": 0.4523809523809524,\n \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7967741935483871,\n \"acc_stderr\": 0.02289168798455496,\n \"acc_norm\": 0.7967741935483871,\n \"acc_norm_stderr\": 0.02289168798455496\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4876847290640394,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.4876847290640394,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.0328766675860349,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.0328766675860349\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.797979797979798,\n \"acc_stderr\": 0.02860620428922987,\n \"acc_norm\": 0.797979797979798,\n \"acc_norm_stderr\": 0.02860620428922987\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033456,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.021500249576033456\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6794871794871795,\n \"acc_stderr\": 0.02366129639396428,\n \"acc_norm\": 0.6794871794871795,\n \"acc_norm_stderr\": 0.02366129639396428\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35555555555555557,\n \"acc_stderr\": 0.029185714949857416,\n \"acc_norm\": 0.35555555555555557,\n \"acc_norm_stderr\": 0.029185714949857416\n },\n 
\"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6848739495798319,\n \"acc_stderr\": 0.030176808288974337,\n \"acc_norm\": 0.6848739495798319,\n \"acc_norm_stderr\": 0.030176808288974337\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8422018348623853,\n \"acc_stderr\": 0.01563002297009244,\n \"acc_norm\": 0.8422018348623853,\n \"acc_norm_stderr\": 0.01563002297009244\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5231481481481481,\n \"acc_stderr\": 0.03406315360711507,\n \"acc_norm\": 0.5231481481481481,\n \"acc_norm_stderr\": 0.03406315360711507\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8431372549019608,\n \"acc_stderr\": 0.02552472232455335,\n \"acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.02552472232455335\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7890295358649789,\n \"acc_stderr\": 0.026558372502661916,\n \"acc_norm\": 0.7890295358649789,\n \"acc_norm_stderr\": 0.026558372502661916\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6816143497757847,\n \"acc_stderr\": 0.03126580522513713,\n \"acc_norm\": 0.6816143497757847,\n \"acc_norm_stderr\": 0.03126580522513713\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7938931297709924,\n \"acc_stderr\": 0.03547771004159465,\n \"acc_norm\": 0.7938931297709924,\n \"acc_norm_stderr\": 0.03547771004159465\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.04133119440243838,\n 
\"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.04133119440243838\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.0335195387952127,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.0335195387952127\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n \"acc_stderr\": 0.021586494001281365,\n \"acc_norm\": 0.8760683760683761,\n \"acc_norm_stderr\": 0.021586494001281365\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8314176245210728,\n \"acc_stderr\": 0.013387895731543604,\n \"acc_norm\": 0.8314176245210728,\n \"acc_norm_stderr\": 0.013387895731543604\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7485549132947977,\n \"acc_stderr\": 0.02335736578587403,\n \"acc_norm\": 0.7485549132947977,\n \"acc_norm_stderr\": 0.02335736578587403\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4346368715083799,\n \"acc_stderr\": 0.016578997435496717,\n \"acc_norm\": 0.4346368715083799,\n \"acc_norm_stderr\": 0.016578997435496717\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7254901960784313,\n \"acc_stderr\": 0.025553169991826524,\n \"acc_norm\": 0.7254901960784313,\n \"acc_norm_stderr\": 0.025553169991826524\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7170418006430869,\n \"acc_stderr\": 0.025583062489984813,\n 
\"acc_norm\": 0.7170418006430869,\n \"acc_norm_stderr\": 0.025583062489984813\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600712995,\n \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600712995\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.49645390070921985,\n \"acc_stderr\": 0.02982674915328092,\n \"acc_norm\": 0.49645390070921985,\n \"acc_norm_stderr\": 0.02982674915328092\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47327249022164275,\n \"acc_stderr\": 0.012751977967676012,\n \"acc_norm\": 0.47327249022164275,\n \"acc_norm_stderr\": 0.012751977967676012\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.02841820861940676,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.02841820861940676\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.018926082916083383,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.018926082916083383\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7346938775510204,\n \"acc_stderr\": 0.028263889943784593,\n \"acc_norm\": 0.7346938775510204,\n \"acc_norm_stderr\": 0.028263889943784593\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8258706467661692,\n \"acc_stderr\": 0.026814951200421603,\n \"acc_norm\": 0.8258706467661692,\n \"acc_norm_stderr\": 0.026814951200421603\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5662650602409639,\n 
\"acc_stderr\": 0.03858158940685516,\n \"acc_norm\": 0.5662650602409639,\n \"acc_norm_stderr\": 0.03858158940685516\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8421052631578947,\n \"acc_stderr\": 0.027966785859160893,\n \"acc_norm\": 0.8421052631578947,\n \"acc_norm_stderr\": 0.027966785859160893\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5691554467564259,\n \"mc1_stderr\": 0.01733527247533237,\n \"mc2\": 0.7012232532865607,\n \"mc2_stderr\": 0.01495517671484403\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8358326756116812,\n \"acc_stderr\": 0.010410849775222782\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7050796057619408,\n \"acc_stderr\": 0.012560698010954772\n }\n}\n```", "repo_url": "https://huggingface.co/CultriX/OmniTrixAI", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|arc:challenge|25_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|gsm8k|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hellaswag|10_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T18-33-17.623853.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T18-33-17.623853.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T18-33-17.623853.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T18-33-17.623853.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T18-33-17.623853.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T18-33-17.623853.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T18-33-17.623853.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": 
"2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": 
"2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T18-33-17.623853.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["**/details_harness|winogrande|5_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T18-33-17.623853.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T18_33_17.623853", "path": ["results_2024-01-25T18-33-17.623853.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T18-33-17.623853.parquet"]}]}]}
2024-01-25T18:35:59+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of CultriX/OmniTrixAI Dataset automatically created during the evaluation run of model CultriX/OmniTrixAI on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T18:33:17.623853(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of CultriX/OmniTrixAI\n\n\n\nDataset automatically created during the evaluation run of model CultriX/OmniTrixAI on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T18:33:17.623853(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of CultriX/OmniTrixAI\n\n\n\nDataset automatically created during the evaluation run of model CultriX/OmniTrixAI on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T18:33:17.623853(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 179, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of CultriX/OmniTrixAI\n\n\n\nDataset automatically created during the evaluation run of model CultriX/OmniTrixAI on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T18:33:17.623853(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
[ -0.04960283264517784, 0.20934072136878967, -0.006638587452471256, 0.0413077138364315, 0.0750470831990242, -0.008131339214742184, 0.0401657409965992, 0.10075915604829788, 0.034258775413036346, 0.18024283647537231, -0.01431274600327015, 0.1010800153017044, 0.08714570105075836, 0.12031622976064682, 0.033760908991098404, -0.13132543861865997, 0.04081602022051811, -0.10503543168306351, 0.10058745741844177, 0.06726926565170288, 0.05836523696780205, -0.0810534879565239, 0.06249484419822693, -0.029975084587931633, 0.03932999074459076, -0.006369186565279961, -0.06559470295906067, -0.03804420679807663, 0.10856227576732635, 0.09308798611164093, 0.0456528514623642, -0.018383868038654327, 0.04038533568382263, -0.26537972688674927, 0.020513510331511497, 0.10185642540454865, -0.005363160278648138, 0.043245259672403336, 0.1499272882938385, -0.08571399003267288, 0.0758160650730133, -0.02878536470234394, 0.06788741052150726, 0.04995184764266014, -0.11052367836236954, -0.12836220860481262, -0.1492377668619156, 0.017374301329255104, 0.056415632367134094, 0.047185033559799194, -0.018777616322040558, 0.11720447242259979, -0.03858112916350365, 0.04675367474555969, 0.14065682888031006, -0.171086385846138, -0.02660784311592579, 0.03752598538994789, 0.019348783418536186, 0.08017271757125854, -0.0843973159790039, -0.0251977127045393, 0.02513512410223484, 0.05615948140621185, -0.008692671544849873, 0.01173829659819603, 0.022786324843764305, 0.018174706026911736, -0.14445284008979797, -0.12433388084173203, 0.102830670773983, -0.004442563280463219, -0.0467720627784729, -0.15527495741844177, -0.0441582053899765, -0.0002680903417058289, 0.006036616861820221, 0.018861843273043633, 0.011222065426409245, -0.0006982766790315509, 0.08064335584640503, -0.0068482449278235435, -0.09341791272163391, -0.027685297653079033, -0.03845006600022316, 0.03765280172228813, 0.03150831535458565, 0.0013093549059703946, 0.011141260154545307, 0.12956704199314117, 0.033639710396528244, -0.052529990673065186, 
-0.08236370235681534, -0.04680810496211052, -0.13381606340408325, -0.0379004180431366, 0.02167476899921894, -0.08528014272451401, 0.050790149718523026, 0.24630482494831085, -0.030574385076761246, 0.021180972456932068, -0.11153288930654526, 0.012003408744931221, 0.11968129873275757, 0.09274906665086746, -0.09051606804132462, -0.08006870746612549, -0.0451461561024189, 0.01812518946826458, 0.020159119740128517, -0.02546975016593933, 0.02711128070950508, 0.0667889416217804, 0.009520800784230232, 0.13144584000110626, 0.11953306198120117, 0.024846352636814117, -0.07178828865289688, -0.010013826191425323, 0.1658574938774109, -0.16392822563648224, 0.00466303713619709, 0.025471188127994537, -0.017567381262779236, -0.08101621270179749, 0.0644763708114624, -0.013004464097321033, -0.053795259445905685, 0.11163879930973053, -0.058763537555933, -0.0702943280339241, -0.09063216298818588, -0.0660560205578804, 0.034862905740737915, -0.010164340026676655, -0.06302837282419205, -0.07424474507570267, -0.11364593356847763, -0.08340422809123993, 0.028422363102436066, -0.06784221529960632, -0.0012544150231406093, 0.004536945838481188, 0.015578407794237137, -0.006832926534116268, -0.009050728753209114, 0.10975316911935806, -0.07051350176334381, 0.03363899886608124, -0.051631730049848557, 0.029635028913617134, 0.11087226122617722, 0.026411764323711395, -0.11017686128616333, 0.09253129363059998, -0.09673186391592026, 0.0964004397392273, -0.10109563916921616, -0.02537478320300579, -0.11653085798025131, 0.019377397373318672, -0.01870817318558693, 0.026012394577264786, -0.014221603982150555, 0.0833999365568161, -0.1923273205757141, -0.004757782910019159, 0.185421422123909, -0.11509889364242554, -0.05355345457792282, 0.09237243235111237, -0.027977418154478073, 0.05548761039972305, 0.03648291155695915, 0.07586263865232468, 0.08804984390735626, -0.09057459980249405, -0.11713165044784546, -0.05800863355398178, -0.03786173835396767, 0.15809114277362823, 0.06002962961792946, -0.08917924761772156, 
0.09828311949968338, 0.03274789825081825, 0.002774689346551895, -0.05109516903758049, -0.010260913521051407, -0.054989080876111984, -0.00421189796179533, -0.0459604412317276, -0.06661441177129745, -0.02981683611869812, -0.07798897475004196, 0.003058112459257245, -0.05951343849301338, -0.008511796593666077, 0.09778575599193573, -0.01836881786584854, 0.027432525530457497, -0.08624561131000519, 0.060197364538908005, -0.0156265702098608, 0.016497138887643814, -0.21362367272377014, -0.07697492837905884, 0.03813714161515236, -0.17767007648944855, 0.050337426364421844, 0.001383985741995275, 0.020526256412267685, 0.06201538071036339, 0.004315129015594721, 0.015321695245802402, 0.024512944743037224, -0.011335212737321854, -0.011040674522519112, -0.1451897770166397, -0.039846472442150116, -0.0792471244931221, 0.08330205827951431, -0.10893412679433823, -0.021135183051228523, 0.05842341482639313, 0.15002083778381348, 0.01943494752049446, -0.07892950624227524, 0.044456854462623596, 0.015027125366032124, -0.05251835659146309, -0.052717190235853195, -0.004563736729323864, -0.01044611819088459, 0.043469056487083435, 0.07691596448421478, -0.17676983773708344, -0.1293189972639084, 0.06858689337968826, 0.14460669457912445, -0.07649102061986923, -0.060336727648973465, -0.06775477528572083, -0.05787145718932152, -0.09153152257204056, -0.05807870626449585, 0.10480860620737076, 0.09306573122739792, 0.04725050553679466, -0.06464120000600815, -0.04703691229224205, -0.004549142438918352, 0.04603524133563042, -0.07048194855451584, 0.1069812998175621, 0.10207650065422058, -0.08854619413614273, 0.10229640454053879, -0.022841930389404297, 0.10323765128850937, 0.09950230270624161, 0.014271681196987629, -0.12092094868421555, -0.009650524705648422, 0.06234254688024521, 0.05717026814818382, 0.06559891253709793, -0.004400233272463083, 0.040142931044101715, 0.08127249032258987, -0.008044433780014515, 0.0342756062746048, -0.0641574114561081, 0.03505862131714821, 0.027600213885307312, 
-0.0036495935637503862, 0.01147510576993227, 0.001296710455790162, 0.028532719239592552, 0.0920725166797638, 0.00855323951691389, 0.07668621093034744, -0.03977617993950844, -0.047112561762332916, -0.08499862998723984, 0.1324273645877838, -0.09641049802303314, -0.23175381124019623, -0.1790284663438797, -0.036775749176740646, -0.027398373931646347, -0.012326454743742943, 0.04655683413147926, 0.012462190352380276, -0.10451187938451767, -0.11493553966283798, 0.042367007583379745, 0.047839436680078506, -0.1178676038980484, -0.030634604394435883, 0.032832249999046326, -0.018275605514645576, -0.16675019264221191, 0.02956327237188816, 0.043853145092725754, -0.072879359126091, 0.0227817315608263, 0.08211950957775116, 0.12484540045261383, 0.0923854410648346, 0.08941571414470673, -0.026402564719319344, -0.01168400514870882, 0.15146514773368835, -0.1157500296831131, 0.03606951981782913, 0.08441236615180969, -0.0416724719107151, 0.07645626366138458, 0.14166831970214844, -0.00010319332068320364, -0.07695134729146957, 0.044535085558891296, 0.09651747345924377, -0.06068405881524086, -0.2570285499095917, -0.08047028630971909, -0.032030023634433746, 0.0598885752260685, 0.09932689368724823, 0.07421109080314636, -0.009635007940232754, -0.005419932771474123, -0.10642492026090622, -0.03289123252034187, -0.02669622004032135, 0.05766426399350166, 0.024240290746092796, -0.008094079792499542, 0.04452443867921829, -0.0508919358253479, 0.02566087804734707, 0.12961351871490479, 0.03801151365041733, 0.17167337238788605, -0.03930873051285744, 0.19028379023075104, 0.09032679349184036, 0.07623256742954254, -0.02314421534538269, 0.07066988199949265, -0.013008926063776016, 0.07337985187768936, -0.015002356842160225, -0.09325404465198517, -0.028674693778157234, 0.09775326400995255, 0.0544821172952652, -0.04850629344582558, 0.059881970286369324, -0.05785521864891052, 0.05763997882604599, 0.2536032497882843, -0.00890620518475771, -0.12660886347293854, -0.03583015501499176, 0.05049942806363106, 
-0.05287769064307213, -0.09407704323530197, 0.004002709407359362, 0.0924818366765976, -0.15250366926193237, 0.012266119942069054, -0.0398748405277729, 0.06804189831018448, -0.1339648813009262, -0.03199942037463188, -0.034283120185136795, 0.050754569470882416, -0.02583659254014492, 0.09563500434160233, -0.142288938164711, 0.09176018089056015, -0.014621609821915627, 0.020311428233981133, -0.06133516505360603, 0.06867292523384094, -0.008559183217585087, -0.05617662891745567, 0.14964549243450165, -0.005123435519635677, -0.10013820976018906, -0.0640522688627243, -0.13616636395454407, -0.012580077163875103, 0.04150094464421272, -0.11736390739679337, 0.1184493750333786, 0.012054036371409893, -0.032288409769535065, -0.04600057750940323, -0.00932554341852665, -0.0762028619647026, -0.23001697659492493, 0.09103122353553772, -0.13401615619659424, 0.06047813594341278, -0.05457819253206253, -0.044524457305669785, -0.06817926466464996, 0.13470341265201569, -0.12680374085903168, -0.05683264508843422, -0.10814081877470016, -0.039170946925878525, 0.16165058314800262, -0.063811756670475, 0.05552516505122185, -0.0460725799202919, 0.1598513126373291, -0.04404253512620926, -0.04960770159959793, 0.003052748506888747, -0.07281950116157532, -0.17499199509620667, -0.0513254813849926, 0.10139886289834976, 0.061154790222644806, 0.017733480781316757, -0.014114252291619778, 0.051338840276002884, 0.02018728293478489, -0.09013344347476959, 0.04088067263364792, 0.12984925508499146, 0.12251359224319458, 0.059584278613328934, -0.03397973254323006, -0.10576090961694717, -0.10168030112981796, -0.09351235628128052, 0.06498425453901291, 0.18485526740550995, -0.06155507266521454, 0.1453023999929428, 0.1363082230091095, -0.11826466768980026, -0.20089679956436157, -0.09497250616550446, -0.0058800517581403255, -0.014172970317304134, 0.11179356276988983, -0.19904620945453644, 0.035584624856710434, 0.0902741327881813, -0.024545105174183846, 0.1163840964436531, -0.2739216387271881, -0.13645659387111664, 
0.03921039402484894, 0.03710910677909851, -0.18079553544521332, -0.14960794150829315, -0.09145206958055496, -0.003665800206363201, -0.13183464109897614, 0.10624941438436508, 0.0009144629584625363, 0.04076084867119789, -0.016739431768655777, 0.05146333947777748, 0.040711067616939545, -0.07331164181232452, 0.12153713405132294, -0.02320513129234314, 0.028847521170973778, -0.0918029248714447, -0.02456241101026535, -0.016640333458781242, -0.04397846385836601, 0.07508071511983871, 0.023501435294747353, 0.03409428894519806, -0.059086233377456665, -0.03383073955774307, -0.049398086965084076, 0.033064886927604675, -0.060429032891988754, -0.05776449292898178, -0.05064227059483528, 0.08495654910802841, 0.07630033791065216, -0.012673377059400082, 0.03373907506465912, -0.04322636500000954, 0.04257207736372948, 0.21126244962215424, 0.06586170196533203, 0.04762735962867737, -0.10525529086589813, -0.04851020500063896, -0.01783822476863861, -0.0030520539730787277, -0.07199446111917496, 0.04859095439314842, 0.08810573071241379, 0.036796171218156815, 0.10600850731134415, -0.015418166294693947, -0.19628673791885376, 0.01021180022507906, 0.07623782753944397, -0.10525684058666229, -0.21067582070827484, 0.04740346595644951, 0.06259752810001373, -0.11394665390253067, -0.08668939769268036, 0.09722280502319336, 0.027222292497754097, -0.02551237680017948, 0.0059326509945094585, 0.07977785170078278, 0.04457561671733856, 0.0811794325709343, -0.01950019784271717, 0.033880989998579025, -0.06545130163431168, 0.10876589268445969, 0.15692520141601562, -0.1082671657204628, -0.0010057489853352308, 0.056271977722644806, -0.046175986528396606, -0.059613894671201706, -0.019105421379208565, 0.03499618545174599, 0.007188056129962206, -0.031009124591946602, -0.003458322025835514, -0.04495302215218544, 0.07658512145280838, 0.15239699184894562, -0.015198505483567715, 0.06988342851400375, 0.019898297265172005, -0.00534086674451828, -0.04587544500827789, 0.11020396649837494, 0.02981954999268055, 
0.045271534472703934, -0.0273906122893095, 0.03693317249417305, 0.011330699548125267, -0.02351507358253002, 0.020130334421992302, -0.06403513997793198, -0.06419478356838226, 0.008882677182555199, -0.1825578510761261, 0.04282289743423462, -0.07996903359889984, -0.009607553482055664, 0.006515998858958483, 0.002108887303620577, 0.0017043504631146789, 0.007736677769571543, -0.057076264172792435, -0.0517900176346302, -0.04852663353085518, 0.13093584775924683, -0.2042948305606842, -0.007300690282136202, 0.09070441126823425, -0.08298788964748383, 0.07210785895586014, 0.00601328955963254, -0.011958087794482708, 0.010233191773295403, -0.08890636265277863, -0.010521356016397476, -0.027796020731329918, 0.06037192419171333, 0.014209787361323833, -0.13416706025600433, -0.012751284055411816, -0.007874579168856144, -0.0888444110751152, -0.0014370400458574295, 0.020636966452002525, -0.1391766518354416, 0.08935896307229996, 0.09523561596870422, -0.03982195630669594, -0.035937000066041946, 0.02351396717131138, 0.03407476842403412, 0.011056633666157722, 0.0905010774731636, -0.026742953807115555, 0.034589629620313644, -0.15151973068714142, -0.03523348271846771, 0.007421235088258982, 0.012491513974964619, 0.04990147054195404, -0.0009634946472942829, 0.025775229558348656, -0.00308260228484869, 0.21391423046588898, -0.02698793075978756, 0.01675497367978096, 0.022377582266926765, -0.015911828726530075, -0.045637503266334534, 0.03406360000371933, -0.03765740618109703, 0.010608397424221039, 0.01904246211051941, 0.002388517837971449, -0.03225356712937355, -0.05224619060754776, -0.01331448182463646, 0.08986667543649673, 0.11496958881616592, 0.21084819734096527, -0.02973197214305401, 0.054422616958618164, -0.15006107091903687, -0.0671197921037674, -0.02268066257238388, -0.0915248841047287, 0.0459434911608696, -0.06485877186059952, 0.058536361902952194, 0.10035829246044159, -0.12104252725839615, 0.1459054797887802, -0.04484514892101288, -0.021394267678260803, -0.0412515290081501, 
-0.1784733235836029, -0.035774558782577515, 0.04250837117433548, 0.0049948617815971375, -0.08108538389205933, 0.11931266635656357, 0.12056876718997955, 0.01312443520873785, 0.000976632465608418, 0.06636568158864975, -0.08645112067461014, -0.05276552587747574, -0.03173947334289551, 0.020251940935850143, 0.02728847973048687, 0.0037317832466214895, 0.05994543060660362, -0.0003920788876712322, 0.05447420850396156, 0.07415913790464401, 0.1030750647187233, 0.053868234157562256, 0.03770853951573372, -0.02892437018454075, -0.04161282256245613, 0.00017343957733828574, -0.02516786940395832, -0.07378948479890823, 0.1777016520500183, 0.07068060338497162, 0.027472060173749924, 0.025115936994552612, 0.1916007399559021, -0.013681563548743725, -0.07233685255050659, -0.14420616626739502, 0.15591289103031158, -0.006798649672418833, 0.03389777988195419, 0.029696378856897354, -0.11586976796388626, 0.0010147904977202415, 0.15225090086460114, 0.10045003890991211, 0.0071963961236178875, 0.01222939882427454, 0.04863744601607323, 0.02274705469608307, -0.023878835141658783, 0.04648108035326004, 0.04129698872566223, 0.2386637181043625, -0.05738329887390137, 0.08249931037425995, -0.02484072931110859, 0.0032248657662421465, -0.0362982302904129, 0.1222185268998146, -0.07511579245328903, 0.02175985835492611, -0.0629863440990448, 0.06556469947099686, -0.06786087155342102, -0.26447147130966187, -0.013371085748076439, -0.06273356080055237, -0.13762710988521576, -0.0061003719456493855, 0.025821620598435402, -0.02951461263000965, 0.0372009202837944, 0.04393326863646507, -0.031175877898931503, 0.19395817816257477, 0.01141090877354145, -0.055933158844709396, -0.06941142678260803, 0.060106005519628525, -0.05874006822705269, 0.27732396125793457, 0.00437053432688117, 0.02384883351624012, 0.08018787205219269, -0.009004914201796055, -0.1288614422082901, 0.0344100221991539, 0.09128178656101227, -0.05975435674190521, 0.036754947155714035, 0.13244129717350006, -0.015837162733078003, 0.13340160250663757, 
0.03237442672252655, 0.022477807477116585, 0.0797019675374031, 0.043260011821985245, 0.024595798924565315, -0.07808761298656464, 0.06247656047344208, -0.08200395852327347, 0.11978403478860855, 0.11673029512166977, -0.0071103875525295734, 0.013615927658975124, -0.05090133473277092, 0.04669031873345375, -0.048877909779548645, 0.11546295136213303, -0.023006398230791092, -0.1318666934967041, 0.049694158136844635, -0.004816250875592232, 0.08263860642910004, -0.21488013863563538, -0.0701230987906456, 0.09394881129264832, -0.055724915117025375, -0.014255840331315994, 0.08821612596511841, 0.0445919930934906, 0.025858314707875252, -0.044547028839588165, -0.11162777245044708, 0.029310178011655807, 0.10259439796209335, -0.06450067460536957, -0.03653968498110771 ]
434807a63004bd6e7ea2e3ed88b0447950a8dd1a
# Dataset Card for Evaluation run of wang7776/Llama-2-7b-chat-hf-20-attention-sparsity <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [wang7776/Llama-2-7b-chat-hf-20-attention-sparsity](https://huggingface.co/wang7776/Llama-2-7b-chat-hf-20-attention-sparsity) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_wang7776__Llama-2-7b-chat-hf-20-attention-sparsity", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-25T19:08:26.254884](https://huggingface.co/datasets/open-llm-leaderboard/details_wang7776__Llama-2-7b-chat-hf-20-attention-sparsity/blob/main/results_2024-01-25T19-08-26.254884.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.4761943213953472, "acc_stderr": 0.03435344147831958, "acc_norm": 0.4811025232857774, "acc_norm_stderr": 0.03510926677242792, "mc1": 0.30599755201958384, "mc1_stderr": 0.016132229728155045, "mc2": 0.45842205614099396, "mc2_stderr": 0.015621084603112444 }, "harness|arc:challenge|25": { "acc": 0.5008532423208191, "acc_stderr": 0.014611369529813272, "acc_norm": 0.5341296928327645, "acc_norm_stderr": 0.014577311315231102 }, "harness|hellaswag|10": { "acc": 0.59061939852619, "acc_stderr": 0.004907146229347549, "acc_norm": 0.7791276638119896, "acc_norm_stderr": 0.0041398679751162995 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542129, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542129 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.42962962962962964, "acc_stderr": 0.04276349494376599, "acc_norm": 0.42962962962962964, "acc_norm_stderr": 0.04276349494376599 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.506578947368421, "acc_stderr": 0.040685900502249704, "acc_norm": 0.506578947368421, "acc_norm_stderr": 0.040685900502249704 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5169811320754717, "acc_stderr": 0.030755120364119905, "acc_norm": 0.5169811320754717, "acc_norm_stderr": 0.030755120364119905 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5, "acc_stderr": 0.04181210050035455, "acc_norm": 0.5, "acc_norm_stderr": 0.04181210050035455 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3930635838150289, "acc_stderr": 0.03724249595817731, "acc_norm": 0.3930635838150289, "acc_norm_stderr": 0.03724249595817731 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.041583075330832865, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.041583075330832865 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.40425531914893614, "acc_stderr": 0.03208115750788684, "acc_norm": 0.40425531914893614, "acc_norm_stderr": 0.03208115750788684 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3684210526315789, "acc_stderr": 0.04537815354939392, "acc_norm": 0.3684210526315789, "acc_norm_stderr": 0.04537815354939392 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.4827586206896552, "acc_stderr": 0.04164188720169377, "acc_norm": 0.4827586206896552, "acc_norm_stderr": 0.04164188720169377 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.28835978835978837, "acc_stderr": 0.02333065405453589, "acc_norm": 0.28835978835978837, "acc_norm_stderr": 0.02333065405453589 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.23809523809523808, "acc_stderr": 0.03809523809523811, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.03809523809523811 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5225806451612903, "acc_stderr": 0.02841498501970786, "acc_norm": 0.5225806451612903, "acc_norm_stderr": 0.02841498501970786 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 
0.3645320197044335, "acc_stderr": 0.033864057460620905, "acc_norm": 0.3645320197044335, "acc_norm_stderr": 0.033864057460620905 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.5757575757575758, "acc_stderr": 0.03859268142070264, "acc_norm": 0.5757575757575758, "acc_norm_stderr": 0.03859268142070264 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.601010101010101, "acc_stderr": 0.03488901616852732, "acc_norm": 0.601010101010101, "acc_norm_stderr": 0.03488901616852732 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.6683937823834197, "acc_stderr": 0.03397636541089118, "acc_norm": 0.6683937823834197, "acc_norm_stderr": 0.03397636541089118 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.41025641025641024, "acc_stderr": 0.024939313906940777, "acc_norm": 0.41025641025641024, "acc_norm_stderr": 0.024939313906940777 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.25555555555555554, "acc_stderr": 0.026593939101844086, "acc_norm": 0.25555555555555554, "acc_norm_stderr": 0.026593939101844086 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.42016806722689076, "acc_stderr": 0.03206183783236152, "acc_norm": 0.42016806722689076, "acc_norm_stderr": 0.03206183783236152 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.03802039760107903, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.03802039760107903 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.6587155963302752, "acc_stderr": 0.020328612816592442, "acc_norm": 0.6587155963302752, "acc_norm_stderr": 0.020328612816592442 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.32407407407407407, "acc_stderr": 0.03191923445686185, "acc_norm": 0.32407407407407407, 
"acc_norm_stderr": 0.03191923445686185 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.6470588235294118, "acc_stderr": 0.033540924375915195, "acc_norm": 0.6470588235294118, "acc_norm_stderr": 0.033540924375915195 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.6286919831223629, "acc_stderr": 0.0314506860074486, "acc_norm": 0.6286919831223629, "acc_norm_stderr": 0.0314506860074486 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5650224215246636, "acc_stderr": 0.033272833702713445, "acc_norm": 0.5650224215246636, "acc_norm_stderr": 0.033272833702713445 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5801526717557252, "acc_stderr": 0.04328577215262972, "acc_norm": 0.5801526717557252, "acc_norm_stderr": 0.04328577215262972 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6198347107438017, "acc_stderr": 0.04431324501968431, "acc_norm": 0.6198347107438017, "acc_norm_stderr": 0.04431324501968431 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5648148148148148, "acc_stderr": 0.04792898170907061, "acc_norm": 0.5648148148148148, "acc_norm_stderr": 0.04792898170907061 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5398773006134969, "acc_stderr": 0.03915857291436971, "acc_norm": 0.5398773006134969, "acc_norm_stderr": 0.03915857291436971 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.32142857142857145, "acc_stderr": 0.04432804055291519, "acc_norm": 0.32142857142857145, "acc_norm_stderr": 0.04432804055291519 }, "harness|hendrycksTest-management|5": { "acc": 0.6699029126213593, "acc_stderr": 0.04656147110012351, "acc_norm": 0.6699029126213593, "acc_norm_stderr": 0.04656147110012351 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7008547008547008, "acc_stderr": 0.02999695185834947, "acc_norm": 0.7008547008547008, "acc_norm_stderr": 0.02999695185834947 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, "acc_norm_stderr": 
0.05024183937956911 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6756066411238825, "acc_stderr": 0.0167409290471627, "acc_norm": 0.6756066411238825, "acc_norm_stderr": 0.0167409290471627 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5173410404624278, "acc_stderr": 0.02690290045866664, "acc_norm": 0.5173410404624278, "acc_norm_stderr": 0.02690290045866664 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2324022346368715, "acc_stderr": 0.01412596875467338, "acc_norm": 0.2324022346368715, "acc_norm_stderr": 0.01412596875467338 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5065359477124183, "acc_stderr": 0.028627470550556054, "acc_norm": 0.5065359477124183, "acc_norm_stderr": 0.028627470550556054 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5562700964630225, "acc_stderr": 0.02821768355665232, "acc_norm": 0.5562700964630225, "acc_norm_stderr": 0.02821768355665232 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.558641975308642, "acc_stderr": 0.027628737155668773, "acc_norm": 0.558641975308642, "acc_norm_stderr": 0.027628737155668773 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.36879432624113473, "acc_stderr": 0.028782227561347243, "acc_norm": 0.36879432624113473, "acc_norm_stderr": 0.028782227561347243 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3494132985658409, "acc_stderr": 0.012177306252786686, "acc_norm": 0.3494132985658409, "acc_norm_stderr": 0.012177306252786686 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4264705882352941, "acc_stderr": 0.030042615832714874, "acc_norm": 0.4264705882352941, "acc_norm_stderr": 0.030042615832714874 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4624183006535948, "acc_stderr": 0.020170614974969768, "acc_norm": 0.4624183006535948, "acc_norm_stderr": 0.020170614974969768 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5181818181818182, "acc_stderr": 0.04785964010794915, "acc_norm": 0.5181818181818182, 
"acc_norm_stderr": 0.04785964010794915 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5183673469387755, "acc_stderr": 0.03198761546763127, "acc_norm": 0.5183673469387755, "acc_norm_stderr": 0.03198761546763127 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6517412935323383, "acc_stderr": 0.033687874661154596, "acc_norm": 0.6517412935323383, "acc_norm_stderr": 0.033687874661154596 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-virology|5": { "acc": 0.45180722891566266, "acc_stderr": 0.03874371556587953, "acc_norm": 0.45180722891566266, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7076023391812866, "acc_stderr": 0.03488647713457922, "acc_norm": 0.7076023391812866, "acc_norm_stderr": 0.03488647713457922 }, "harness|truthfulqa:mc|0": { "mc1": 0.30599755201958384, "mc1_stderr": 0.016132229728155045, "mc2": 0.45842205614099396, "mc2_stderr": 0.015621084603112444 }, "harness|winogrande|5": { "acc": 0.7071823204419889, "acc_stderr": 0.01278932111854262 }, "harness|gsm8k|5": { "acc": 0.177407126611069, "acc_stderr": 0.010522533016890783 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. 
--> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. 
--> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_wang7776__Llama-2-7b-chat-hf-20-attention-sparsity
[ "region:us" ]
2024-01-25T19:10:49+00:00
{"pretty_name": "Evaluation run of wang7776/Llama-2-7b-chat-hf-20-attention-sparsity", "dataset_summary": "Dataset automatically created during the evaluation run of model [wang7776/Llama-2-7b-chat-hf-20-attention-sparsity](https://huggingface.co/wang7776/Llama-2-7b-chat-hf-20-attention-sparsity) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_wang7776__Llama-2-7b-chat-hf-20-attention-sparsity\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-25T19:08:26.254884](https://huggingface.co/datasets/open-llm-leaderboard/details_wang7776__Llama-2-7b-chat-hf-20-attention-sparsity/blob/main/results_2024-01-25T19-08-26.254884.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.4761943213953472,\n \"acc_stderr\": 0.03435344147831958,\n \"acc_norm\": 0.4811025232857774,\n \"acc_norm_stderr\": 0.03510926677242792,\n \"mc1\": 0.30599755201958384,\n \"mc1_stderr\": 0.016132229728155045,\n \"mc2\": 0.45842205614099396,\n \"mc2_stderr\": 0.015621084603112444\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5008532423208191,\n \"acc_stderr\": 0.014611369529813272,\n \"acc_norm\": 0.5341296928327645,\n \"acc_norm_stderr\": 0.014577311315231102\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.59061939852619,\n \"acc_stderr\": 0.004907146229347549,\n \"acc_norm\": 0.7791276638119896,\n \"acc_norm_stderr\": 0.0041398679751162995\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542129,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542129\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.42962962962962964,\n \"acc_stderr\": 0.04276349494376599,\n \"acc_norm\": 0.42962962962962964,\n \"acc_norm_stderr\": 0.04276349494376599\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.506578947368421,\n \"acc_stderr\": 0.040685900502249704,\n \"acc_norm\": 0.506578947368421,\n \"acc_norm_stderr\": 0.040685900502249704\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5169811320754717,\n \"acc_stderr\": 0.030755120364119905,\n \"acc_norm\": 0.5169811320754717,\n \"acc_norm_stderr\": 0.030755120364119905\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04181210050035455,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04181210050035455\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 
0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3930635838150289,\n \"acc_stderr\": 0.03724249595817731,\n \"acc_norm\": 0.3930635838150289,\n \"acc_norm_stderr\": 0.03724249595817731\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.041583075330832865,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.041583075330832865\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.40425531914893614,\n \"acc_stderr\": 0.03208115750788684,\n \"acc_norm\": 0.40425531914893614,\n \"acc_norm_stderr\": 0.03208115750788684\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3684210526315789,\n \"acc_stderr\": 0.04537815354939392,\n \"acc_norm\": 0.3684210526315789,\n \"acc_norm_stderr\": 0.04537815354939392\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.4827586206896552,\n \"acc_stderr\": 0.04164188720169377,\n \"acc_norm\": 0.4827586206896552,\n \"acc_norm_stderr\": 0.04164188720169377\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.28835978835978837,\n \"acc_stderr\": 0.02333065405453589,\n \"acc_norm\": 0.28835978835978837,\n \"acc_norm_stderr\": 0.02333065405453589\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.23809523809523808,\n \"acc_stderr\": 0.03809523809523811,\n 
\"acc_norm\": 0.23809523809523808,\n \"acc_norm_stderr\": 0.03809523809523811\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5225806451612903,\n \"acc_stderr\": 0.02841498501970786,\n \"acc_norm\": 0.5225806451612903,\n \"acc_norm_stderr\": 0.02841498501970786\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.3645320197044335,\n \"acc_stderr\": 0.033864057460620905,\n \"acc_norm\": 0.3645320197044335,\n \"acc_norm_stderr\": 0.033864057460620905\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.5757575757575758,\n \"acc_stderr\": 0.03859268142070264,\n \"acc_norm\": 0.5757575757575758,\n \"acc_norm_stderr\": 0.03859268142070264\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.601010101010101,\n \"acc_stderr\": 0.03488901616852732,\n \"acc_norm\": 0.601010101010101,\n \"acc_norm_stderr\": 0.03488901616852732\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.6683937823834197,\n \"acc_stderr\": 0.03397636541089118,\n \"acc_norm\": 0.6683937823834197,\n \"acc_norm_stderr\": 0.03397636541089118\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.41025641025641024,\n \"acc_stderr\": 0.024939313906940777,\n \"acc_norm\": 0.41025641025641024,\n \"acc_norm_stderr\": 0.024939313906940777\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.25555555555555554,\n \"acc_stderr\": 0.026593939101844086,\n \"acc_norm\": 0.25555555555555554,\n \"acc_norm_stderr\": 0.026593939101844086\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": 
{\n \"acc\": 0.42016806722689076,\n \"acc_stderr\": 0.03206183783236152,\n \"acc_norm\": 0.42016806722689076,\n \"acc_norm_stderr\": 0.03206183783236152\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.03802039760107903,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.03802039760107903\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.6587155963302752,\n \"acc_stderr\": 0.020328612816592442,\n \"acc_norm\": 0.6587155963302752,\n \"acc_norm_stderr\": 0.020328612816592442\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.32407407407407407,\n \"acc_stderr\": 0.03191923445686185,\n \"acc_norm\": 0.32407407407407407,\n \"acc_norm_stderr\": 0.03191923445686185\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.6470588235294118,\n \"acc_stderr\": 0.033540924375915195,\n \"acc_norm\": 0.6470588235294118,\n \"acc_norm_stderr\": 0.033540924375915195\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.6286919831223629,\n \"acc_stderr\": 0.0314506860074486,\n \"acc_norm\": 0.6286919831223629,\n \"acc_norm_stderr\": 0.0314506860074486\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5650224215246636,\n \"acc_stderr\": 0.033272833702713445,\n \"acc_norm\": 0.5650224215246636,\n \"acc_norm_stderr\": 0.033272833702713445\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.5801526717557252,\n \"acc_stderr\": 0.04328577215262972,\n \"acc_norm\": 0.5801526717557252,\n \"acc_norm_stderr\": 0.04328577215262972\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6198347107438017,\n \"acc_stderr\": 0.04431324501968431,\n \"acc_norm\": 0.6198347107438017,\n \"acc_norm_stderr\": 0.04431324501968431\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5648148148148148,\n \"acc_stderr\": 0.04792898170907061,\n \"acc_norm\": 0.5648148148148148,\n \"acc_norm_stderr\": 
0.04792898170907061\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.5398773006134969,\n \"acc_stderr\": 0.03915857291436971,\n \"acc_norm\": 0.5398773006134969,\n \"acc_norm_stderr\": 0.03915857291436971\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.32142857142857145,\n \"acc_stderr\": 0.04432804055291519,\n \"acc_norm\": 0.32142857142857145,\n \"acc_norm_stderr\": 0.04432804055291519\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6699029126213593,\n \"acc_stderr\": 0.04656147110012351,\n \"acc_norm\": 0.6699029126213593,\n \"acc_norm_stderr\": 0.04656147110012351\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7008547008547008,\n \"acc_stderr\": 0.02999695185834947,\n \"acc_norm\": 0.7008547008547008,\n \"acc_norm_stderr\": 0.02999695185834947\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6756066411238825,\n \"acc_stderr\": 0.0167409290471627,\n \"acc_norm\": 0.6756066411238825,\n \"acc_norm_stderr\": 0.0167409290471627\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5173410404624278,\n \"acc_stderr\": 0.02690290045866664,\n \"acc_norm\": 0.5173410404624278,\n \"acc_norm_stderr\": 0.02690290045866664\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2324022346368715,\n \"acc_stderr\": 0.01412596875467338,\n \"acc_norm\": 0.2324022346368715,\n \"acc_norm_stderr\": 0.01412596875467338\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5065359477124183,\n \"acc_stderr\": 0.028627470550556054,\n \"acc_norm\": 0.5065359477124183,\n \"acc_norm_stderr\": 0.028627470550556054\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5562700964630225,\n \"acc_stderr\": 0.02821768355665232,\n \"acc_norm\": 0.5562700964630225,\n \"acc_norm_stderr\": 
0.02821768355665232\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.558641975308642,\n \"acc_stderr\": 0.027628737155668773,\n \"acc_norm\": 0.558641975308642,\n \"acc_norm_stderr\": 0.027628737155668773\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.36879432624113473,\n \"acc_stderr\": 0.028782227561347243,\n \"acc_norm\": 0.36879432624113473,\n \"acc_norm_stderr\": 0.028782227561347243\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3494132985658409,\n \"acc_stderr\": 0.012177306252786686,\n \"acc_norm\": 0.3494132985658409,\n \"acc_norm_stderr\": 0.012177306252786686\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4264705882352941,\n \"acc_stderr\": 0.030042615832714874,\n \"acc_norm\": 0.4264705882352941,\n \"acc_norm_stderr\": 0.030042615832714874\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.4624183006535948,\n \"acc_stderr\": 0.020170614974969768,\n \"acc_norm\": 0.4624183006535948,\n \"acc_norm_stderr\": 0.020170614974969768\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5181818181818182,\n \"acc_stderr\": 0.04785964010794915,\n \"acc_norm\": 0.5181818181818182,\n \"acc_norm_stderr\": 0.04785964010794915\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.5183673469387755,\n \"acc_stderr\": 0.03198761546763127,\n \"acc_norm\": 0.5183673469387755,\n \"acc_norm_stderr\": 0.03198761546763127\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6517412935323383,\n \"acc_stderr\": 0.033687874661154596,\n \"acc_norm\": 0.6517412935323383,\n \"acc_norm_stderr\": 0.033687874661154596\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.45180722891566266,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 
0.45180722891566266,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7076023391812866,\n \"acc_stderr\": 0.03488647713457922,\n \"acc_norm\": 0.7076023391812866,\n \"acc_norm_stderr\": 0.03488647713457922\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.30599755201958384,\n \"mc1_stderr\": 0.016132229728155045,\n \"mc2\": 0.45842205614099396,\n \"mc2_stderr\": 0.015621084603112444\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7071823204419889,\n \"acc_stderr\": 0.01278932111854262\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.177407126611069,\n \"acc_stderr\": 0.010522533016890783\n }\n}\n```", "repo_url": "https://huggingface.co/wang7776/Llama-2-7b-chat-hf-20-attention-sparsity", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|arc:challenge|25_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|gsm8k|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hellaswag|10_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T19-08-26.254884.parquet", 
"**/details_harness|hendrycksTest-anatomy|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T19-08-26.254884.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T19-08-26.254884.parquet", 
"**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T19-08-26.254884.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T19-08-26.254884.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T19-08-26.254884.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-25T19-08-26.254884.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": 
["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": 
["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", 
"path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": 
"2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": 
"2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-25T19-08-26.254884.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["**/details_harness|winogrande|5_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-25T19-08-26.254884.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_25T19_08_26.254884", "path": ["results_2024-01-25T19-08-26.254884.parquet"]}, {"split": "latest", "path": ["results_2024-01-25T19-08-26.254884.parquet"]}]}]}
2024-01-25T19:11:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of wang7776/Llama-2-7b-chat-hf-20-attention-sparsity Dataset automatically created during the evaluation run of model wang7776/Llama-2-7b-chat-hf-20-attention-sparsity on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-25T19:08:26.254884(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of wang7776/Llama-2-7b-chat-hf-20-attention-sparsity\n\n\n\nDataset automatically created during the evaluation run of model wang7776/Llama-2-7b-chat-hf-20-attention-sparsity on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T19:08:26.254884(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of wang7776/Llama-2-7b-chat-hf-20-attention-sparsity\n\n\n\nDataset automatically created during the evaluation run of model wang7776/Llama-2-7b-chat-hf-20-attention-sparsity on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-25T19:08:26.254884(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. 
More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 205, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of wang7776/Llama-2-7b-chat-hf-20-attention-sparsity\n\n\n\nDataset automatically created during the evaluation run of model wang7776/Llama-2-7b-chat-hf-20-attention-sparsity on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-25T19:08:26.254884(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]" ]
[ -0.0894879624247551, 0.17753474414348602, -0.005256677512079477, 0.02316313609480858, 0.05711543560028076, -0.039100658148527145, 0.002759329043328762, 0.13263607025146484, 0.023604292422533035, 0.17163938283920288, -0.026576749980449677, 0.07335324585437775, 0.09516458213329315, 0.20207099616527557, 0.016366994008421898, -0.14591142535209656, 0.031053969636559486, -0.07100854814052582, 0.07629773765802383, 0.07501844316720963, 0.08270867168903351, -0.10129430890083313, 0.07036859542131424, -0.0022841563913971186, -0.04106226935982704, 0.0007306659827008843, -0.08587965369224548, -0.04198984429240227, 0.09390979260206223, 0.07048928737640381, 0.012403452768921852, -0.016737723723053932, -0.008589657954871655, -0.2327747941017151, 0.023673636838793755, 0.07722822576761246, 0.029792658984661102, 0.07839027792215347, 0.12324809283018112, -0.07754898071289062, 0.13871720433235168, -0.07059897482395172, 0.032537296414375305, 0.04452170059084892, -0.1132790595293045, -0.06872285902500153, -0.1695275455713272, 0.015392123721539974, 0.08223161101341248, 0.05780290067195892, -0.02865517884492874, 0.12454917281866074, -0.007317586336284876, 0.029815636575222015, 0.17315703630447388, -0.1488521695137024, -0.02111143060028553, -0.01815546676516533, 0.03533139079809189, 0.04033436253666878, -0.12078879773616791, -0.02021600306034088, 0.030721493065357208, 0.0348195880651474, -0.006338993087410927, 0.016976596787571907, -0.015570740215480328, 0.022238396108150482, -0.13301599025726318, -0.07081287354230881, 0.11516190320253372, 0.015250599943101406, -0.0565943568944931, -0.12769778072834015, -0.0414448156952858, -0.08318352699279785, -0.015989506617188454, -0.004847431089729071, 0.021552423015236855, -0.022219274193048477, 0.06368415802717209, -0.02684393711388111, -0.09182571619749069, -0.010114094242453575, -0.03471316024661064, 0.052554745227098465, 0.015681304037570953, 0.007921092212200165, -0.0016891290433704853, 0.10889765620231628, 0.03258008882403374, 
-0.09472722560167313, -0.12825553119182587, -0.036286860704422, -0.11638201773166656, -0.03870037570595741, 0.00619666138663888, -0.05753520503640175, 0.039598770439624786, 0.20765680074691772, -0.09703540056943893, 0.02279200777411461, -0.09881365299224854, -0.003195028053596616, 0.09951815754175186, 0.08035314828157425, -0.016501517966389656, -0.053852926939725876, -0.001252642017789185, 0.03150869160890579, 0.04158104583621025, -0.010777454823255539, 0.03537201136350632, 0.06928647309541702, 0.03517890349030495, 0.11644894629716873, 0.10248132050037384, 0.004853953141719103, -0.05110381543636322, -0.0251612551510334, 0.17348529398441315, -0.1861603558063507, -0.010032573714852333, 0.004880815744400024, -0.05320168286561966, -0.11453118175268173, 0.05251568928360939, -0.010350625962018967, -0.060444023460149765, 0.08098709583282471, -0.07901462912559509, -0.04993331432342529, -0.08181644976139069, -0.032780345529317856, 0.054202284663915634, -0.033940646797418594, -0.0568300299346447, -0.05047497898340225, -0.12531550228595734, -0.08666786551475525, -0.017774952575564384, -0.08669508993625641, -0.005848338361829519, 0.030594846233725548, -0.011263610795140266, -0.026068951934576035, -0.021961910650134087, 0.11969545483589172, -0.08259247243404388, 0.022700801491737366, -0.00017512482008896768, 0.004659096710383892, 0.07037627696990967, 0.041463665664196014, -0.1379328966140747, 0.08031405508518219, -0.0901644378900528, 0.11591871082782745, -0.09638014435768127, 0.017392832785844803, -0.12747380137443542, -0.006176733877509832, -0.0590423047542572, 0.01379639096558094, 0.012516525574028492, 0.11442095786333084, -0.2312799096107483, -0.015251939184963703, 0.13070176541805267, -0.11972800642251968, -0.09805750846862793, 0.04034195840358734, -0.022038064897060394, 0.04395315796136856, 0.06114117428660393, 0.09654409438371658, 0.0772923082113266, -0.053844619542360306, -0.135452538728714, -0.09836919605731964, -0.00434721726924181, 0.11757918447256088, 
0.04261745885014534, -0.06186527758836746, 0.15069842338562012, 0.03497498854994774, -0.00444013299420476, -0.0461094006896019, -0.020735519006848335, -0.05435905233025551, -0.017915695905685425, -0.053452540189027786, -0.11937854439020157, -0.003716917708516121, -0.047714587301015854, -0.0338781401515007, -0.07502506673336029, 0.060106221586465836, 0.10277987271547318, 0.011707314290106297, 0.02181345410645008, -0.1063457801938057, 0.06966830044984818, 0.023529667407274246, 0.006894201505929232, -0.22188153862953186, -0.08959093689918518, 0.03956077992916107, -0.11984873563051224, 0.02881803922355175, 0.01078301016241312, 0.009185366332530975, 0.03342524915933609, 0.0070816585794091225, 0.021474052220582962, 0.0029986409936100245, -0.005168394651263952, -0.022942809388041496, -0.13889046013355255, -0.040538933128118515, -0.06840752065181732, 0.09956637024879456, -0.15477454662322998, -0.032786693423986435, 0.12496992945671082, 0.20031258463859558, 0.024267084896564484, -0.10625655949115753, 0.07333742827177048, -0.0006511428509838879, -0.04276794567704201, -0.07011677324771881, 0.000743495358619839, -0.007299307268112898, 0.05381042882800102, 0.06390709429979324, -0.17472654581069946, -0.11599621176719666, 0.07854214310646057, 0.1518857628107071, -0.03779950365424156, -0.05595971271395683, -0.11207164824008942, -0.040926381945610046, -0.08459747582674026, -0.044416286051273346, 0.06344423443078995, 0.0674145519733429, 0.030340446159243584, -0.06395582109689713, -0.09410521388053894, -0.007199169136583805, 0.08401945978403091, -0.06629832834005356, 0.10101984441280365, 0.048728231340646744, -0.12824182212352753, 0.08870638161897659, -0.0011312938295304775, 0.1498161256313324, 0.11475593596696854, -0.008417848497629166, -0.1326470524072647, 0.003895391011610627, 0.05169370397925377, 0.032264433801174164, 0.09025182574987411, -0.022191692143678665, 0.008741170167922974, 0.06953545659780502, -0.015790918841958046, 0.062398411333560944, -0.06413242220878601, 
0.038332510739564896, 0.01783526875078678, -0.010586525313556194, 0.04317932203412056, 0.022206252440810204, 0.01760035753250122, 0.07280471175909042, 0.03720023110508919, 0.10558689385652542, -0.033998776227235794, -0.04648446664214134, -0.09796742349863052, 0.14597152173519135, -0.08352278918027878, -0.2523452043533325, -0.14205802977085114, -0.04972955584526062, -0.011609714478254318, -0.009871955960988998, 0.047623638063669205, -0.010376624763011932, -0.09591871500015259, -0.12513329088687897, 0.04314446821808815, 0.05226349085569382, -0.0841592326760292, -0.009445921517908573, 0.02504703588783741, 0.03072528727352619, -0.16811789572238922, 0.03280653804540634, 0.01657763496041298, -0.04314142465591431, -0.027355704456567764, 0.09383983165025711, 0.11787595599889755, 0.05932045355439186, 0.04553915187716484, -0.01239772792905569, -0.007096041459590197, 0.20013807713985443, -0.09843727201223373, 0.02260122448205948, 0.11000598967075348, -0.0444534569978714, 0.04744099825620651, 0.1204746663570404, -0.004987560678273439, -0.09402931481599808, 0.03500184789299965, 0.09725438058376312, -0.052926626056432724, -0.2762090265750885, -0.07061504572629929, -0.039800770580768585, -0.01917390711605549, 0.08763481676578522, 0.10315585136413574, -0.01583912782371044, -0.007231341674923897, -0.11482703685760498, -0.05488375574350357, -0.06954064220190048, 0.04641008377075195, 0.05633384734392166, 0.022283630445599556, 0.04757443815469742, -0.055267319083213806, 0.056387484073638916, 0.11673317849636078, -0.009540034458041191, 0.15457625687122345, -0.061417464166879654, 0.15999118983745575, 0.09425245970487595, 0.09284193813800812, -0.008291545324027538, 0.05546613410115242, -0.011693516746163368, 0.04981943964958191, 0.01317526027560234, -0.09786659479141235, -0.056804072111845016, 0.08900637179613113, -0.013123935088515282, -0.021695366129279137, 0.05465138331055641, -0.028835033997893333, 0.06632567197084427, 0.203817680478096, 0.019457528367638588, -0.15967696905136108, 
-0.09159871935844421, 0.04608781263232231, -0.043156396597623825, -0.08643245697021484, -0.01921885274350643, 0.06239641457796097, -0.11713059991598129, 0.044875238090753555, -0.03127838298678398, 0.08199551701545715, -0.121291384100914, -0.018124647438526154, 0.007917817682027817, 0.07882027328014374, -0.020521817728877068, 0.08296142518520355, -0.11445526778697968, 0.1060088723897934, -0.005017020273953676, 0.05340017005801201, -0.06214349716901779, 0.05369037017226219, 0.005639370996505022, -0.06652863323688507, 0.11673086881637573, 0.014527791179716587, -0.11636961251497269, -0.03403937444090843, -0.1304192841053009, 0.016129665076732635, 0.04637448117136955, -0.1057579517364502, 0.10773000866174698, 0.021211674436926842, -0.01845240592956543, -0.03825680911540985, -0.039473846554756165, -0.11100289970636368, -0.19889624416828156, 0.12559692561626434, -0.1279129981994629, 0.09008324891328812, -0.07569712400436401, -0.05454782024025917, -0.01980143040418625, 0.195588156580925, -0.08594395220279694, -0.06752153486013412, -0.13994625210762024, 0.09156736731529236, 0.16563370823860168, -0.06214543431997299, 0.07491357624530792, -0.02588925138115883, 0.2030855119228363, 0.016018826514482498, -0.05981915816664696, 0.012201648205518723, -0.06740115582942963, -0.1746293157339096, -0.024811990559101105, 0.1559571474790573, 0.06525567173957825, -0.004288387950509787, -0.0037735672667622566, 0.06239599362015724, 0.016245435923337936, -0.08413451910018921, 0.06392905116081238, 0.07902375608682632, 0.0686338022351265, 0.01520945318043232, -0.03256863355636597, -0.0932629257440567, -0.10889934748411179, -0.10079747438430786, 0.059917427599430084, 0.14818470180034637, -0.034969788044691086, 0.1442798227071762, 0.08663219213485718, -0.09654008597135544, -0.16625311970710754, -0.023909784853458405, 0.01636574976146221, -0.0033802480902522802, 0.09578094631433487, -0.18208004534244537, 0.09368310123682022, 0.09288410097360611, -0.024881580844521523, 0.15779471397399902, 
-0.2117394506931305, -0.13976320624351501, 0.027570905163884163, 0.019381895661354065, -0.17568530142307281, -0.13786228001117706, -0.13474148511886597, -0.00896512996405363, -0.1594439148902893, 0.12267526239156723, 0.0014704061904922128, 0.02691577933728695, -0.024770839139819145, 0.04979223757982254, 0.03886739909648895, -0.046029720455408096, 0.12073367089033127, -0.01580261066555977, 0.014094892889261246, -0.08494195342063904, -0.008009389042854309, -0.011197066865861416, -0.058556050062179565, 0.05657251924276352, 0.042570117861032486, 0.06642191857099533, -0.09843940287828445, -0.027839720249176025, -0.04997732490301132, 0.0729249119758606, -0.05839576944708824, -0.03352172672748566, -0.062168776988983154, 0.08085449784994125, 0.0499948114156723, -0.011442307382822037, 0.06131920963525772, -0.04137461259961128, 0.07055584341287613, 0.17878441512584686, 0.06041190028190613, -0.000725316524039954, -0.06454948335886002, -0.02939176745712757, 0.006672615651041269, -0.017505623400211334, -0.12173870205879211, 0.04273470118641853, 0.10401757806539536, 0.05393627658486366, 0.0818604901432991, -0.019255544990301132, -0.20105724036693573, 0.011175482533872128, 0.10483487695455551, -0.12033858895301819, -0.17027407884597778, 0.017030924558639526, 0.12704743444919586, -0.11260432749986649, -0.03864442557096481, 0.10554133355617523, 0.022387750446796417, -0.03783112391829491, 0.010232929140329361, 0.07466600835323334, 0.053041040897369385, 0.09910248219966888, -0.021515507251024246, 0.04947284236550331, -0.08389826118946075, 0.11014833301305771, 0.120432049036026, -0.09913302958011627, 0.005605968181043863, 0.11965931206941605, -0.06700637936592102, -0.03566307574510574, -0.029672354459762573, 0.01836831122636795, -0.013587245717644691, -0.030135372653603554, -0.013316113501787186, -0.05848204344511032, 0.0806373804807663, 0.18110260367393494, -0.008965641260147095, 0.0616472065448761, 0.03541675955057144, 0.005901120137423277, -0.029623573645949364, 0.1095835492014885, 
0.021174201741814613, 0.04903830587863922, -0.022785518318414688, 0.028640327975153923, 0.02133817784488201, -0.007085722871124744, 0.012120634317398071, -0.044638585299253464, -0.027168842032551765, -0.022384556010365486, -0.18008045852184296, 0.013591627590358257, -0.08784902095794678, -0.03607442229986191, -0.025918560102581978, -0.05441852658987045, -0.04859049245715141, 0.04056252911686897, -0.06803324818611145, -0.06629083305597305, -0.0705704465508461, 0.09026475250720978, -0.20006206631660461, 0.025335686281323433, 0.08398032188415527, -0.07976733148097992, 0.09663865715265274, 0.0358971431851387, -0.001497785677202046, 0.023723650723695755, -0.0793011337518692, -0.03438263759016991, -0.013294925913214684, 0.029805343598127365, 0.05206681787967682, -0.14937324821949005, -0.005913340952247381, 0.024670099839568138, -0.06717023253440857, -0.02769915573298931, 0.04402351751923561, -0.14724507927894592, -0.002142647048458457, 0.05896814167499542, -0.015465384349226952, -0.028673814609646797, 0.027910497039556503, 0.07263664156198502, 0.00932394154369831, 0.08261553943157196, -0.005915057845413685, 0.01431568805128336, -0.17772382497787476, -0.03418213501572609, -0.008359605446457863, -0.010946743190288544, 0.021605869755148888, 0.03294208645820618, 0.04561598226428032, -0.0220206119120121, 0.20656727254390717, -0.038624636828899384, 0.027102792635560036, 0.07316523790359497, -0.01956196315586567, -0.09035174548625946, 0.03678317740559578, 0.07891090214252472, 0.03213953599333763, 0.027131831273436546, 0.04021273925900459, -0.059779904782772064, -0.03459523990750313, -0.026910046115517616, 0.11145035922527313, 0.15817692875862122, 0.18518926203250885, 0.0013994635082781315, 0.0653000921010971, -0.16556091606616974, -0.06686964631080627, 0.026965316385030746, -0.07425032556056976, 0.05025294050574303, -0.055264972150325775, 0.07437638193368912, 0.10053074359893799, -0.12147559970617294, 0.0916195958852768, -0.07350921630859375, -0.03403087705373764, 
-0.028714505955576897, -0.1255943477153778, -0.03583404794335365, -0.00913471169769764, 0.004859779961407185, -0.09563950449228287, 0.11232972890138626, 0.10972975939512253, 0.0033511826768517494, -0.0071745035238564014, 0.11407124996185303, -0.08502256125211716, -0.07652986794710159, -0.007093676365911961, 0.014959779568016529, 0.02126414142549038, 0.02019966021180153, 0.05179167911410332, 0.01955283433198929, 0.08758620172739029, 0.07769143581390381, 0.06715911626815796, 0.06470253318548203, 0.044967666268348694, -0.014804031699895859, -0.0564424954354763, 0.028393475338816643, -0.01987813413143158, -0.04452214390039444, 0.141746386885643, 0.05240059271454811, 0.044047001749277115, 0.018504764884710312, 0.24546073377132416, 0.004424436949193478, -0.03906344994902611, -0.12406153976917267, 0.11084995418787003, 0.03521507978439331, 0.003627438098192215, 0.025537807494401932, -0.14898252487182617, 0.029253192245960236, 0.18039384484291077, 0.07039935141801834, 0.03617803752422333, 0.0041955201886594296, 0.026127176359295845, 0.02477724850177765, -0.029638471081852913, 0.01161491684615612, 0.06934452056884766, 0.17834581434726715, -0.030600348487496376, 0.03546549752354622, -0.01840294897556305, -0.039358798414468765, -0.009656831622123718, 0.10512619465589523, -0.046582967042922974, 0.03455500677227974, -0.06626169383525848, 0.0767759382724762, -0.06441623717546463, -0.34285834431648254, -0.0023390287533402443, -0.11388661712408066, -0.15923331677913666, -0.007171903736889362, 0.04259689152240753, -0.021200604736804962, 0.03077453374862671, 0.04834304377436638, -0.02827804908156395, 0.15874621272087097, 0.016009291633963585, -0.0683055892586708, -0.0906602218747139, 0.09646067768335342, -0.07106804847717285, 0.23878659307956696, -0.0144603680819273, -0.000979740871116519, 0.08531580120325089, 0.0022716152016073465, -0.18916264176368713, 0.01556013710796833, 0.07547317445278168, -0.09017746150493622, 0.03087168000638485, 0.19291207194328308, -0.006561311427503824, 
0.10546808689832687, 0.06429459154605865, 0.006733528804033995, 0.035148102790117264, 0.04428800940513611, 0.019555792212486267, -0.08136675506830215, 0.03723609447479248, -0.08097630739212036, 0.13743062317371368, 0.134502574801445, -0.036553237587213516, 0.01976516656577587, -0.0694616362452507, 0.06527239084243774, -0.03726193308830261, 0.10277502983808517, -0.0005328870029188693, -0.19363521039485931, 0.07090619951486588, 0.0760621502995491, 0.05007990449666977, -0.19761458039283752, -0.04594423994421959, 0.0912640318274498, -0.04746568202972412, -0.00033702378277666867, 0.13243649899959564, 0.03924211859703064, 0.020521998405456543, -0.05730097368359566, -0.05549602583050728, 0.016528083011507988, 0.10299130529165268, -0.09448505192995071, -0.03703496232628822 ]