| column | dtype | min length | max length |
|---|---|---|---|
| sha | string | 40 | 40 |
| text | string | 0 | 13.4M |
| id | string | 2 | 117 |
| tags | sequence | | |
| created_at | string | 25 | 25 |
| metadata | string | 2 | 31.7M |
| last_modified | string | 25 | 25 |
6aea94a16c95bbc72683433e43cec7cf8fd62bc5
trevorweston/arii
[ "region:us" ]
2024-02-15T22:01:52+00:00
{}
2024-02-15T22:05:04+00:00
5c9a1fc2c132bb638807c63e46f9fba76730c000
# Args ```python {'base_model': 'mistralai/Mistral-7B-v0.1', 'check_length_correctness': True, 'debug': False, 'hf_entity': 'vwxyzjn', 'params': TaskQueryHParams(length=3000, format_str='SUBREDDIT: r/{subreddit}\n' '\n' 'TITLE: {title}\n' '\n' 'POST: {post}\n' '\n' 'TL;DR:', truncate_field='post', truncate_text='\n', padding='pad_token', pad_token=[32000], pad_side='left', max_query_length=3000, max_sft_query_response_length=4000, max_sft_response_length=1500, max_rm_query_response_length=4500, max_rm_response_length=1500), 'push_to_hub': True} ```
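The `TaskQueryHParams` above describe how each example is rendered into a query: fill in `format_str`, truncate the `post` field at newline boundaries (`truncate_text='\n'`) until the tokenized query fits within `length`, then left-pad with `pad_token` up to `max_query_length`. A minimal sketch of that logic, assuming the parameters above are available as a dict and using a hypothetical `tokenize` helper; this is illustrative, not the preprocessing script that produced the dataset:

```python
# Illustrative sketch of the query construction implied by the hyperparameters above.
# `tokenize` is a hypothetical callable returning a list of token ids.
def build_query(example: dict, tokenize, hp: dict) -> list:
    def render(post: str) -> str:
        return hp["format_str"].format(
            subreddit=example["subreddit"], title=example["title"], post=post
        )

    post = example[hp["truncate_field"]]  # 'post'
    tokens = tokenize(render(post))
    # Repeatedly cut the post at its last newline until the query fits `length`.
    while len(tokens) > hp["length"] and hp["truncate_text"] in post:
        post = post.rsplit(hp["truncate_text"], 1)[0]
        tokens = tokenize(render(post))

    # pad_token is a one-element list (e.g. [32000]), so this builds the padding prefix.
    padding = hp["pad_token"] * (hp["max_query_length"] - len(tokens))
    return padding + tokens if hp["pad_side"] == "left" else tokens + padding
```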
vwxyzjn/ultrachat_200k_filtered_1708034814
[ "region:us" ]
2024-02-15T22:09:51+00:00
{"dataset_info": {"features": [{"name": "prompt", "dtype": "string"}, {"name": "prompt_id", "dtype": "string"}, {"name": "messages", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "query", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "query_token", "sequence": "int64"}, {"name": "query_reference_response", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "query_reference_response_token", "sequence": "int64"}, {"name": "query_reference_response_token_len", "dtype": "int64"}, {"name": "query_token_len", "dtype": "int64"}, {"name": "reference_response", "struct": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "reference_response_token", "sequence": "int64"}, {"name": "reference_response_token_len", "dtype": "int64"}], "splits": [{"name": "test_sft", "num_bytes": 1982888370.9168758, "num_examples": 22991}, {"name": "train_sft", "num_bytes": 17846869528.524822, "num_examples": 206698}], "download_size": 3301659997, "dataset_size": 19829757899.441696}}
2024-02-15T22:12:40+00:00
d92a2db76b7636e59af247ff534c22b4a360a6e0
marcones/marcones
[ "license:openrail", "region:us" ]
2024-02-15T22:10:20+00:00
{"license": "openrail"}
2024-02-15T22:10:42+00:00
2ca50a5c84be3c0aedd53b1c56d1ce5fbe46d88b
yleo/aqua-binarized
[ "region:us" ]
2024-02-15T22:16:14+00:00
{"dataset_info": {"features": [{"name": "instruction", "dtype": "string"}, {"name": "chosen", "dtype": "string"}, {"name": "rejected", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 2345096, "num_examples": 1000}], "download_size": 1405535, "dataset_size": 2345096}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-16T22:49:40+00:00
58cd51c9c8434992e10c6126e70d769824f18d70
ekolasky/MainResultIdForMistral7B
[ "region:us" ]
2024-02-15T22:20:03+00:00
{"dataset_info": {"features": [{"name": "instruction", "dtype": "string"}, {"name": "outputs", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 906830, "num_examples": 333}], "download_size": 504756, "dataset_size": 906830}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-15T22:21:23+00:00
02478d2b9ea312804fd60a4088ef80a67e0c7283
# Dataset consisting of anonymous Polish stories ## Warning: Stories were not curated by me; some may use strong language or sexual references This dataset consists of all (28k) stories dumped from [anonimowe wyzwania](https://anonimowe.pl/) in January. Stories are submitted by anonymous users. I have included a community rating, which you can use for filtering. Stories are very diverse: some are sad, some funny. The sheer number might seem suspicious, but the vast majority were written by young people. This may help your model avoid sounding strict, corporate, boring, or academic. Default sorting is based on community rating. More information about the requirements for stories: [link](https://anonimowe.pl/faq) ## Where to find me - [Github](https://github.com/JonaszPotoniec) - [Linkedin](https://www.linkedin.com/in/jonasz-potoniec/) - [E-mail](mailto:[email protected]) - [Telegram](https://t.me/JonaszPotoniec)
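Each row pairs the `story` text with a `points` field (the community rating mentioned above), so filtering by rating is straightforward with the `datasets` library. A minimal sketch; the threshold of 10 is an arbitrary example:

```python
# Sketch: load the stories and keep only those above a chosen community rating.
from datasets import load_dataset

stories = load_dataset("JonaszPotoniec/anonimowe-polish-stories", split="train")
popular = stories.filter(lambda row: row["points"] >= 10)  # threshold is arbitrary
print(len(popular), popular[0]["story"][:100])
```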
JonaszPotoniec/anonimowe-polish-stories
[ "task_categories:text-generation", "size_categories:10K<n<100K", "language:pl", "license:mit", "not-for-all-audiences", "region:us" ]
2024-02-15T22:20:13+00:00
{"language": ["pl"], "license": "mit", "size_categories": ["10K<n<100K"], "task_categories": ["text-generation"], "pretty_name": "Anonimowe wyzwania", "dataset_info": {"features": [{"name": "points", "dtype": "int64"}, {"name": "story", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 33017836, "num_examples": 27798}], "download_size": 22463377, "dataset_size": 33017836}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "tags": ["not-for-all-audiences"]}
2024-02-15T22:39:55+00:00
224c2e806d73c98a15c688dac6ad0efb3714ca68
marcones/vozmarcones
[ "license:openrail", "region:us" ]
2024-02-15T22:22:44+00:00
{"license": "openrail"}
2024-02-15T22:22:59+00:00
3bbd3fa7009ed6534ede5bbb220b8e10b6025985
# Args ```python {'base_model': 'mistralai/Mistral-7B-v0.1', 'check_length_correctness': True, 'debug': False, 'hf_entity': 'vwxyzjn', 'params': TaskQueryHParams(length=3000, format_str='SUBREDDIT: r/{subreddit}\n' '\n' 'TITLE: {title}\n' '\n' 'POST: {post}\n' '\n' 'TL;DR:', truncate_field='post', truncate_text='\n', padding='pad_token', pad_token=[32000], pad_side='left', max_query_length=3000, max_sft_query_response_length=4000, max_sft_response_length=1500, max_rm_query_response_length=4500, max_rm_response_length=1500), 'push_to_hub': True} ```
vwxyzjn/ultrachat_200k_filtered_1708035667
[ "region:us" ]
2024-02-15T22:23:56+00:00
{"dataset_info": {"features": [{"name": "prompt", "dtype": "string"}, {"name": "prompt_id", "dtype": "string"}, {"name": "messages", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "query", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "query_token", "sequence": "int64"}, {"name": "query_reference_response", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "query_reference_response_token", "sequence": "int64"}, {"name": "query_reference_response_token_len", "dtype": "int64"}, {"name": "query_token_len", "dtype": "int64"}, {"name": "reference_response", "struct": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "reference_response_token", "sequence": "int64"}, {"name": "reference_response_token_len", "dtype": "int64"}], "splits": [{"name": "test_sft", "num_bytes": 1982888370.9168758, "num_examples": 22991}, {"name": "train_sft", "num_bytes": 17846869528.524822, "num_examples": 206698}], "download_size": 3301659997, "dataset_size": 19829757899.441696}}
2024-02-15T22:29:05+00:00
4ed6db01a0dd01e3f93cf00fea9d74c7d63953d5
MatsuoDochiai/IAE
[ "license:openrail", "region:us" ]
2024-02-15T22:24:55+00:00
{"license": "openrail"}
2024-02-15T22:25:14+00:00
24e68a15ed3e18781397e6ea448fa00a65812ac3
Generated by ChatGPT
CreitinGameplays/you-are-elisa-chan
[ "region:us" ]
2024-02-15T22:26:23+00:00
{}
2024-02-15T22:27:31+00:00
63c71da57f5891125e2c5fd36ff1351747675891
# Dataset Card for "ultrafeedback_binarized_1708035667" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
vwxyzjn/ultrafeedback_binarized_1708035667
[ "region:us" ]
2024-02-15T22:28:31+00:00
{"dataset_info": {"features": [{"name": "prompt", "dtype": "string"}, {"name": "prompt_id", "dtype": "string"}, {"name": "chosen", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "rejected", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "messages", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "score_chosen", "dtype": "float64"}, {"name": "score_rejected", "dtype": "float64"}, {"name": "query", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "query_token", "sequence": "int64"}, {"name": "query_token_len", "dtype": "int64"}, {"name": "query_chosen_token", "sequence": "int64"}, {"name": "query_chosen_token_len", "dtype": "int64"}, {"name": "chosen_token", "sequence": "int64"}, {"name": "chosen_token_len", "dtype": "int64"}, {"name": "query_rejected_token", "sequence": "int64"}, {"name": "query_rejected_token_len", "dtype": "int64"}, {"name": "rejected_token", "sequence": "int64"}, {"name": "rejected_token_len", "dtype": "int64"}], "splits": [{"name": "test_prefs", "num_bytes": 235051943.0, "num_examples": 2000}, {"name": "train_prefs", "num_bytes": 7188255622.3255415, "num_examples": 61112}], "download_size": 477048940, "dataset_size": 7423307565.3255415}}
2024-02-15T22:29:04+00:00
509daa48db465032eeda6721595abb09a638ee2f
# Dataset Card for Evaluation run of BarraHome/PequeLLaMa-1B-Instruct-v0.1-16bit <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [BarraHome/PequeLLaMa-1B-Instruct-v0.1-16bit](https://huggingface.co/BarraHome/PequeLLaMa-1B-Instruct-v0.1-16bit) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_BarraHome__PequeLLaMa-1B-Instruct-v0.1-16bit", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-15T22:30:35.769938](https://huggingface.co/datasets/open-llm-leaderboard/details_BarraHome__PequeLLaMa-1B-Instruct-v0.1-16bit/blob/main/results_2024-02-15T22-30-35.769938.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.24925356596098744, "acc_stderr": 0.03049039803240805, "acc_norm": 0.250967942400928, "acc_norm_stderr": 0.0312990053733219, "mc1": 0.22031823745410037, "mc1_stderr": 0.014509045171487295, "mc2": 0.41096447978752615, "mc2_stderr": 0.014916925934314724 }, "harness|arc:challenge|25": { "acc": 0.24658703071672355, "acc_stderr": 0.01259572626879012, "acc_norm": 0.27986348122866894, "acc_norm_stderr": 0.013119040897725922 }, "harness|hellaswag|10": { "acc": 0.333698466440948, "acc_stderr": 0.0047056977452221435, "acc_norm": 0.4302927703644692, "acc_norm_stderr": 0.004941051795214789 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.23, "acc_stderr": 0.04229525846816508, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816508 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2, "acc_stderr": 0.03455473702325437, "acc_norm": 0.2, "acc_norm_stderr": 0.03455473702325437 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123384, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123384 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21132075471698114, "acc_stderr": 0.025125766484827845, "acc_norm": 0.21132075471698114, "acc_norm_stderr": 0.025125766484827845 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036846, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 
0.04793724854411019 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749884, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749884 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2425531914893617, "acc_stderr": 0.028020226271200217, "acc_norm": 0.2425531914893617, "acc_norm_stderr": 0.028020226271200217 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2894736842105263, "acc_stderr": 0.04266339443159394, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.04266339443159394 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.1724137931034483, "acc_stderr": 0.03147830790259575, "acc_norm": 0.1724137931034483, "acc_norm_stderr": 0.03147830790259575 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.21428571428571427, "acc_stderr": 0.02113285918275444, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.02113285918275444 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.25396825396825395, "acc_stderr": 0.03893259610604673, "acc_norm": 0.25396825396825395, "acc_norm_stderr": 0.03893259610604673 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.16129032258064516, "acc_stderr": 0.020923327006423305, "acc_norm": 0.16129032258064516, "acc_norm_stderr": 0.020923327006423305 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.1477832512315271, "acc_stderr": 0.02496962133352127, "acc_norm": 0.1477832512315271, "acc_norm_stderr": 0.02496962133352127 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.17676767676767677, "acc_stderr": 0.027178752639044915, "acc_norm": 0.17676767676767677, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.24352331606217617, "acc_stderr": 0.03097543638684542, "acc_norm": 0.24352331606217617, "acc_norm_stderr": 0.03097543638684542 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.24615384615384617, "acc_stderr": 0.021840866990423088, "acc_norm": 0.24615384615384617, "acc_norm_stderr": 0.021840866990423088 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2814814814814815, "acc_stderr": 0.027420019350945277, "acc_norm": 0.2814814814814815, "acc_norm_stderr": 0.027420019350945277 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.18907563025210083, "acc_stderr": 0.025435119438105353, "acc_norm": 0.18907563025210083, "acc_norm_stderr": 0.025435119438105353 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.2185430463576159, "acc_stderr": 0.033742355504256936, "acc_norm": 0.2185430463576159, "acc_norm_stderr": 0.033742355504256936 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.20550458715596331, "acc_stderr": 0.017324352325016005, "acc_norm": 0.20550458715596331, "acc_norm_stderr": 0.017324352325016005 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.46296296296296297, "acc_stderr": 0.03400603625538272, "acc_norm": 0.46296296296296297, "acc_norm_stderr": 0.03400603625538272 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.2696078431372549, "acc_stderr": 0.03114557065948678, "acc_norm": 0.2696078431372549, "acc_norm_stderr": 0.03114557065948678 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.28270042194092826, "acc_stderr": 0.029312814153955927, "acc_norm": 0.28270042194092826, "acc_norm_stderr": 0.029312814153955927 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.29596412556053814, "acc_stderr": 0.030636591348699796, "acc_norm": 0.29596412556053814, "acc_norm_stderr": 0.030636591348699796 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.38016528925619836, "acc_stderr": 0.04431324501968432, "acc_norm": 0.38016528925619836, "acc_norm_stderr": 0.04431324501968432 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.28703703703703703, "acc_stderr": 0.043733130409147614, "acc_norm": 0.28703703703703703, "acc_norm_stderr": 0.043733130409147614 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.2883435582822086, "acc_stderr": 0.03559039531617342, "acc_norm": 0.2883435582822086, "acc_norm_stderr": 0.03559039531617342 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2767857142857143, "acc_stderr": 0.04246624336697624, "acc_norm": 0.2767857142857143, "acc_norm_stderr": 0.04246624336697624 }, "harness|hendrycksTest-management|5": { "acc": 0.1650485436893204, "acc_stderr": 0.036756688322331886, "acc_norm": 0.1650485436893204, "acc_norm_stderr": 0.036756688322331886 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2606837606837607, "acc_stderr": 0.028760348956523414, "acc_norm": 0.2606837606837607, "acc_norm_stderr": 0.028760348956523414 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2515964240102171, "acc_stderr": 0.015517322365529614, "acc_norm": 0.2515964240102171, "acc_norm_stderr": 0.015517322365529614 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2514450867052023, "acc_stderr": 0.02335736578587404, "acc_norm": 0.2514450867052023, "acc_norm_stderr": 0.02335736578587404 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23575418994413408, "acc_stderr": 0.014196375686290804, "acc_norm": 0.23575418994413408, "acc_norm_stderr": 0.014196375686290804 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.023929155517351284, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.023929155517351284 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.21221864951768488, "acc_stderr": 0.02322275679743511, "acc_norm": 0.21221864951768488, "acc_norm_stderr": 0.02322275679743511 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.23148148148148148, "acc_stderr": 0.023468429832451163, "acc_norm": 0.23148148148148148, 
"acc_norm_stderr": 0.023468429832451163 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.22695035460992907, "acc_stderr": 0.024987106365642973, "acc_norm": 0.22695035460992907, "acc_norm_stderr": 0.024987106365642973 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.23533246414602346, "acc_stderr": 0.010834432543912226, "acc_norm": 0.23533246414602346, "acc_norm_stderr": 0.010834432543912226 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.3382352941176471, "acc_stderr": 0.028739328513983572, "acc_norm": 0.3382352941176471, "acc_norm_stderr": 0.028739328513983572 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.23636363636363636, "acc_stderr": 0.04069306319721376, "acc_norm": 0.23636363636363636, "acc_norm_stderr": 0.04069306319721376 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.20816326530612245, "acc_stderr": 0.025991117672813292, "acc_norm": 0.20816326530612245, "acc_norm_stderr": 0.025991117672813292 }, "harness|hendrycksTest-sociology|5": { "acc": 0.2537313432835821, "acc_stderr": 0.030769444967296014, "acc_norm": 0.2537313432835821, "acc_norm_stderr": 0.030769444967296014 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-virology|5": { "acc": 0.25903614457831325, "acc_stderr": 0.03410646614071857, "acc_norm": 0.25903614457831325, "acc_norm_stderr": 0.03410646614071857 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 0.22031823745410037, "mc1_stderr": 0.014509045171487295, "mc2": 0.41096447978752615, "mc2_stderr": 0.014916925934314724 }, "harness|winogrande|5": { "acc": 0.5272296764009471, "acc_stderr": 0.014031631629827701 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
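The per-subject MMLU entries above (`harness|hendrycksTest-*`) are typically summarized as an unweighted mean over the 57 subjects. A minimal sketch of that aggregation, assuming `results` is a dict shaped like the one printed in the Latest results section; the helper is illustrative and not part of the evaluation harness:

```python
# Illustrative helper: unweighted mean of the MMLU ("hendrycksTest") scores
# from a results dict shaped like the one shown above.
def mmlu_macro_avg(results: dict, key: str = "acc") -> float:
    scores = [v[key] for k, v in results.items() if k.startswith("harness|hendrycksTest-")]
    return sum(scores) / len(scores)
```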
open-llm-leaderboard/details_BarraHome__PequeLLaMa-1B-Instruct-v0.1-16bit
[ "region:us" ]
2024-02-15T22:32:54+00:00
{"pretty_name": "Evaluation run of BarraHome/PequeLLaMa-1B-Instruct-v0.1-16bit", "dataset_summary": "Dataset automatically created during the evaluation run of model [BarraHome/PequeLLaMa-1B-Instruct-v0.1-16bit](https://huggingface.co/BarraHome/PequeLLaMa-1B-Instruct-v0.1-16bit) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_BarraHome__PequeLLaMa-1B-Instruct-v0.1-16bit\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-15T22:30:35.769938](https://huggingface.co/datasets/open-llm-leaderboard/details_BarraHome__PequeLLaMa-1B-Instruct-v0.1-16bit/blob/main/results_2024-02-15T22-30-35.769938.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.24925356596098744,\n \"acc_stderr\": 0.03049039803240805,\n \"acc_norm\": 0.250967942400928,\n \"acc_norm_stderr\": 0.0312990053733219,\n \"mc1\": 0.22031823745410037,\n \"mc1_stderr\": 0.014509045171487295,\n \"mc2\": 0.41096447978752615,\n \"mc2_stderr\": 0.014916925934314724\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.24658703071672355,\n \"acc_stderr\": 0.01259572626879012,\n \"acc_norm\": 0.27986348122866894,\n \"acc_norm_stderr\": 0.013119040897725922\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.333698466440948,\n \"acc_stderr\": 0.0047056977452221435,\n \"acc_norm\": 0.4302927703644692,\n \"acc_norm_stderr\": 0.004941051795214789\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816508,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816508\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.03455473702325437,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.03455473702325437\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123384,\n \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123384\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.21132075471698114,\n \"acc_stderr\": 0.025125766484827845,\n \"acc_norm\": 0.21132075471698114,\n \"acc_norm_stderr\": 0.025125766484827845\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.2,\n 
\"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411019,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n \"acc_stderr\": 0.030952890217749884,\n \"acc_norm\": 0.20809248554913296,\n \"acc_norm_stderr\": 0.030952890217749884\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2425531914893617,\n \"acc_stderr\": 0.028020226271200217,\n \"acc_norm\": 0.2425531914893617,\n \"acc_norm_stderr\": 0.028020226271200217\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2894736842105263,\n \"acc_stderr\": 0.04266339443159394,\n \"acc_norm\": 0.2894736842105263,\n \"acc_norm_stderr\": 0.04266339443159394\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.1724137931034483,\n \"acc_stderr\": 0.03147830790259575,\n \"acc_norm\": 0.1724137931034483,\n \"acc_norm_stderr\": 0.03147830790259575\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.21428571428571427,\n \"acc_stderr\": 0.02113285918275444,\n \"acc_norm\": 0.21428571428571427,\n \"acc_norm_stderr\": 0.02113285918275444\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.25396825396825395,\n \"acc_stderr\": 0.03893259610604673,\n \"acc_norm\": 0.25396825396825395,\n \"acc_norm_stderr\": 0.03893259610604673\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.16129032258064516,\n \"acc_stderr\": 0.020923327006423305,\n \"acc_norm\": 0.16129032258064516,\n \"acc_norm_stderr\": 0.020923327006423305\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.1477832512315271,\n \"acc_stderr\": 0.02496962133352127,\n \"acc_norm\": 0.1477832512315271,\n \"acc_norm_stderr\": 0.02496962133352127\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 0.027178752639044915\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.24352331606217617,\n \"acc_stderr\": 0.03097543638684542,\n \"acc_norm\": 0.24352331606217617,\n \"acc_norm_stderr\": 0.03097543638684542\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.24615384615384617,\n \"acc_stderr\": 0.021840866990423088,\n \"acc_norm\": 0.24615384615384617,\n \"acc_norm_stderr\": 0.021840866990423088\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2814814814814815,\n \"acc_stderr\": 0.027420019350945277,\n \"acc_norm\": 0.2814814814814815,\n \"acc_norm_stderr\": 0.027420019350945277\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.18907563025210083,\n \"acc_stderr\": 0.025435119438105353,\n \"acc_norm\": 0.18907563025210083,\n \"acc_norm_stderr\": 0.025435119438105353\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2185430463576159,\n \"acc_stderr\": 0.033742355504256936,\n \"acc_norm\": 0.2185430463576159,\n \"acc_norm_stderr\": 0.033742355504256936\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.20550458715596331,\n \"acc_stderr\": 0.017324352325016005,\n \"acc_norm\": 0.20550458715596331,\n \"acc_norm_stderr\": 0.017324352325016005\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.46296296296296297,\n \"acc_stderr\": 0.03400603625538272,\n \"acc_norm\": 0.46296296296296297,\n \"acc_norm_stderr\": 0.03400603625538272\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.2696078431372549,\n \"acc_stderr\": 0.03114557065948678,\n \"acc_norm\": 0.2696078431372549,\n \"acc_norm_stderr\": 0.03114557065948678\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.28270042194092826,\n \"acc_stderr\": 0.029312814153955927,\n \"acc_norm\": 0.28270042194092826,\n \"acc_norm_stderr\": 0.029312814153955927\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.29596412556053814,\n \"acc_stderr\": 0.030636591348699796,\n \"acc_norm\": 0.29596412556053814,\n \"acc_norm_stderr\": 0.030636591348699796\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.38016528925619836,\n \"acc_stderr\": 0.04431324501968432,\n \"acc_norm\": 0.38016528925619836,\n \"acc_norm_stderr\": 0.04431324501968432\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.28703703703703703,\n \"acc_stderr\": 0.043733130409147614,\n \"acc_norm\": 0.28703703703703703,\n \"acc_norm_stderr\": 0.043733130409147614\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.2883435582822086,\n \"acc_stderr\": 0.03559039531617342,\n \"acc_norm\": 0.2883435582822086,\n \"acc_norm_stderr\": 0.03559039531617342\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2767857142857143,\n \"acc_stderr\": 0.04246624336697624,\n \"acc_norm\": 0.2767857142857143,\n \"acc_norm_stderr\": 0.04246624336697624\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.1650485436893204,\n \"acc_stderr\": 0.036756688322331886,\n \"acc_norm\": 0.1650485436893204,\n \"acc_norm_stderr\": 0.036756688322331886\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2606837606837607,\n \"acc_stderr\": 0.028760348956523414,\n \"acc_norm\": 0.2606837606837607,\n \"acc_norm_stderr\": 0.028760348956523414\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2515964240102171,\n \"acc_stderr\": 0.015517322365529614,\n \"acc_norm\": 0.2515964240102171,\n \"acc_norm_stderr\": 0.015517322365529614\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.2514450867052023,\n \"acc_stderr\": 0.02335736578587404,\n \"acc_norm\": 0.2514450867052023,\n \"acc_norm_stderr\": 0.02335736578587404\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23575418994413408,\n \"acc_stderr\": 0.014196375686290804,\n \"acc_norm\": 0.23575418994413408,\n \"acc_norm_stderr\": 0.014196375686290804\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.21221864951768488,\n \"acc_stderr\": 0.02322275679743511,\n \"acc_norm\": 0.21221864951768488,\n \"acc_norm_stderr\": 0.02322275679743511\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.23148148148148148,\n \"acc_stderr\": 0.023468429832451163,\n \"acc_norm\": 0.23148148148148148,\n \"acc_norm_stderr\": 0.023468429832451163\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.22695035460992907,\n \"acc_stderr\": 0.024987106365642973,\n \"acc_norm\": 0.22695035460992907,\n \"acc_norm_stderr\": 0.024987106365642973\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.23533246414602346,\n \"acc_stderr\": 0.010834432543912226,\n \"acc_norm\": 0.23533246414602346,\n \"acc_norm_stderr\": 0.010834432543912226\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.3382352941176471,\n \"acc_stderr\": 0.028739328513983572,\n \"acc_norm\": 0.3382352941176471,\n \"acc_norm_stderr\": 0.028739328513983572\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.23636363636363636,\n \"acc_stderr\": 0.04069306319721376,\n \"acc_norm\": 0.23636363636363636,\n \"acc_norm_stderr\": 0.04069306319721376\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.20816326530612245,\n \"acc_stderr\": 0.025991117672813292,\n \"acc_norm\": 0.20816326530612245,\n \"acc_norm_stderr\": 0.025991117672813292\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.2537313432835821,\n \"acc_stderr\": 0.030769444967296014,\n \"acc_norm\": 0.2537313432835821,\n \"acc_norm_stderr\": 0.030769444967296014\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.25903614457831325,\n \"acc_stderr\": 0.03410646614071857,\n \"acc_norm\": 0.25903614457831325,\n \"acc_norm_stderr\": 0.03410646614071857\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n \"acc_stderr\": 0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.22031823745410037,\n \"mc1_stderr\": 0.014509045171487295,\n \"mc2\": 0.41096447978752615,\n \"mc2_stderr\": 0.014916925934314724\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5272296764009471,\n \"acc_stderr\": 0.014031631629827701\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n 
\"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/BarraHome/PequeLLaMa-1B-Instruct-v0.1-16bit", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|arc:challenge|25_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|gsm8k|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hellaswag|10_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T22-30-35.769938.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T22-30-35.769938.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-15T22-30-35.769938.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-15T22-30-35.769938.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T22-30-35.769938.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_15T22_30_35.769938", "path": ["**/details_harness|winogrande|5_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-15T22-30-35.769938.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_15T22_30_35.769938", "path": ["results_2024-02-15T22-30-35.769938.parquet"]}, {"split": "latest", "path": ["results_2024-02-15T22-30-35.769938.parquet"]}]}]}
2024-02-15T22:33:17+00:00
b3ee1a654517bd19bfc0b5bb4628619b4317dc98
mryannugent/synthetic-responses
[ "region:us" ]
2024-02-15T22:36:45+00:00
{}
2024-02-15T23:32:20+00:00
6a511f446d966a6003871f0781f56b511303dd16
# Dataset Card for Evaluation run of sonthenguyen/OpenHermes-2.5-Mistral-7B-mt-bench-DPO-reversed_corrupted <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [sonthenguyen/OpenHermes-2.5-Mistral-7B-mt-bench-DPO-reversed_corrupted](https://huggingface.co/sonthenguyen/OpenHermes-2.5-Mistral-7B-mt-bench-DPO-reversed_corrupted) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_sonthenguyen__OpenHermes-2.5-Mistral-7B-mt-bench-DPO-reversed_corrupted", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-15T22:34:32.769102](https://huggingface.co/datasets/open-llm-leaderboard/details_sonthenguyen__OpenHermes-2.5-Mistral-7B-mt-bench-DPO-reversed_corrupted/blob/main/results_2024-02-15T22-34-32.769102.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6363878047023346, "acc_stderr": 0.032296390142505176, "acc_norm": 0.6396525594831768, "acc_norm_stderr": 0.03293649951708016, "mc1": 0.3525091799265606, "mc1_stderr": 0.016724646380756547, "mc2": 0.5164963215954579, "mc2_stderr": 0.015210020803636122 }, "harness|arc:challenge|25": { "acc": 0.60580204778157, "acc_stderr": 0.014280522667467325, "acc_norm": 0.64419795221843, "acc_norm_stderr": 0.01399057113791876 }, "harness|hellaswag|10": { "acc": 0.6483768173670583, "acc_stderr": 0.004765012078929387, "acc_norm": 0.8394742083250348, "acc_norm_stderr": 0.003663427536178161 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6, "acc_stderr": 0.04232073695151589, "acc_norm": 0.6, "acc_norm_stderr": 0.04232073695151589 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7039473684210527, "acc_stderr": 0.03715062154998904, "acc_norm": 0.7039473684210527, "acc_norm_stderr": 0.03715062154998904 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6830188679245283, "acc_stderr": 0.0286372356398009, "acc_norm": 0.6830188679245283, "acc_norm_stderr": 0.0286372356398009 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7569444444444444, "acc_stderr": 0.03586879280080341, "acc_norm": 0.7569444444444444, "acc_norm_stderr": 0.03586879280080341 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 },
"harness|hendrycksTest-college_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6184971098265896, "acc_stderr": 0.03703851193099521, "acc_norm": 0.6184971098265896, "acc_norm_stderr": 0.03703851193099521 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5659574468085107, "acc_stderr": 0.03240038086792747, "acc_norm": 0.5659574468085107, "acc_norm_stderr": 0.03240038086792747 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5241379310344828, "acc_stderr": 0.0416180850350153, "acc_norm": 0.5241379310344828, "acc_norm_stderr": 0.0416180850350153 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42857142857142855, "acc_stderr": 0.02548718714785938, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.02548718714785938 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4523809523809524, "acc_stderr": 0.044518079590553275, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.044518079590553275 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7935483870967742, "acc_stderr": 0.02302589961718871, "acc_norm": 0.7935483870967742, "acc_norm_stderr": 0.02302589961718871 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5172413793103449, "acc_stderr": 0.035158955511656986, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.035158955511656986 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.793939393939394, "acc_stderr": 0.031584153240477114, "acc_norm": 0.793939393939394, "acc_norm_stderr": 0.031584153240477114 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.797979797979798, "acc_stderr": 0.028606204289229865, "acc_norm": 0.797979797979798, "acc_norm_stderr": 0.028606204289229865 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8860103626943006, "acc_stderr": 0.022935144053919443, "acc_norm": 0.8860103626943006, "acc_norm_stderr": 0.022935144053919443 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6102564102564103, "acc_stderr": 0.024726967886647074, "acc_norm": 0.6102564102564103, "acc_norm_stderr": 0.024726967886647074 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3, "acc_stderr": 0.0279404571362284, "acc_norm": 0.3, "acc_norm_stderr": 0.0279404571362284 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6722689075630253, "acc_stderr": 0.03048991141767323, "acc_norm": 0.6722689075630253, 
"acc_norm_stderr": 0.03048991141767323 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.038020397601079024, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.038020397601079024 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8348623853211009, "acc_stderr": 0.01591955782997604, "acc_norm": 0.8348623853211009, "acc_norm_stderr": 0.01591955782997604 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5092592592592593, "acc_stderr": 0.034093869469927006, "acc_norm": 0.5092592592592593, "acc_norm_stderr": 0.034093869469927006 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7990196078431373, "acc_stderr": 0.02812597226565437, "acc_norm": 0.7990196078431373, "acc_norm_stderr": 0.02812597226565437 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.810126582278481, "acc_stderr": 0.02553010046023349, "acc_norm": 0.810126582278481, "acc_norm_stderr": 0.02553010046023349 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477518, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477518 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7938931297709924, "acc_stderr": 0.035477710041594654, "acc_norm": 0.7938931297709924, "acc_norm_stderr": 0.035477710041594654 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7603305785123967, "acc_stderr": 0.03896878985070416, "acc_norm": 0.7603305785123967, "acc_norm_stderr": 0.03896878985070416 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.0401910747255735, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.0401910747255735 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7852760736196319, "acc_stderr": 0.032262193772867744, "acc_norm": 0.7852760736196319, "acc_norm_stderr": 0.032262193772867744 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5178571428571429, "acc_stderr": 0.04742762361243011, "acc_norm": 0.5178571428571429, "acc_norm_stderr": 0.04742762361243011 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8589743589743589, "acc_stderr": 0.022801382534597528, "acc_norm": 0.8589743589743589, "acc_norm_stderr": 0.022801382534597528 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8250319284802043, "acc_stderr": 0.013586619219903338, "acc_norm": 0.8250319284802043, "acc_norm_stderr": 0.013586619219903338 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7167630057803468, "acc_stderr": 0.02425790170532338, "acc_norm": 0.7167630057803468, "acc_norm_stderr": 0.02425790170532338 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.30837988826815643, "acc_stderr": 0.01544571691099888, "acc_norm": 0.30837988826815643, "acc_norm_stderr": 0.01544571691099888 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7516339869281046, "acc_stderr": 0.02473998135511359, "acc_norm": 0.7516339869281046, "acc_norm_stderr": 0.02473998135511359 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6816720257234726, "acc_stderr": 0.026457225067811025, "acc_norm": 0.6816720257234726, "acc_norm_stderr": 0.026457225067811025 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7530864197530864, 
"acc_stderr": 0.02399350170904211, "acc_norm": 0.7530864197530864, "acc_norm_stderr": 0.02399350170904211 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5035460992907801, "acc_stderr": 0.02982674915328092, "acc_norm": 0.5035460992907801, "acc_norm_stderr": 0.02982674915328092 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46870925684485004, "acc_stderr": 0.012745204626083131, "acc_norm": 0.46870925684485004, "acc_norm_stderr": 0.012745204626083131 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6727941176470589, "acc_stderr": 0.02850145286039655, "acc_norm": 0.6727941176470589, "acc_norm_stderr": 0.02850145286039655 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.673202614379085, "acc_stderr": 0.01897542792050721, "acc_norm": 0.673202614379085, "acc_norm_stderr": 0.01897542792050721 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8159203980099502, "acc_stderr": 0.027403859410786845, "acc_norm": 0.8159203980099502, "acc_norm_stderr": 0.027403859410786845 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.033799766898963086, "acc_norm": 0.87, "acc_norm_stderr": 0.033799766898963086 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.03869543323472101, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.3525091799265606, "mc1_stderr": 0.016724646380756547, "mc2": 0.5164963215954579, "mc2_stderr": 0.015210020803636122 }, "harness|winogrande|5": { "acc": 0.7774269928966061, "acc_stderr": 0.011690933809712667 }, "harness|gsm8k|5": { "acc": 0.5322213798332069, "acc_stderr": 0.013743857303073797 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
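## Example: loading the aggregated results and the latest split

As a quick sketch of how the configurations and splits described above can be combined: the "results" configuration holds the aggregated metrics, each per-task configuration (for example "harness_gsm8k_5") keeps one timestamped split per run plus a "latest" split. The snippet below follows that pattern; the exact fields of the returned rows are an assumption and may differ between runs.

```python
from datasets import load_dataset

repo = "open-llm-leaderboard/details_sonthenguyen__OpenHermes-2.5-Mistral-7B-mt-bench-DPO-reversed_corrupted"

# Aggregated metrics of the run: the "results" configuration, "latest" split
# (split names follow the pattern shown in the file listing of this repo).
results = load_dataset(repo, "results", split="latest")

# Per-sample details for a single task, also through the "latest" split.
gsm8k_details = load_dataset(repo, "harness_gsm8k_5", split="latest")

print(results[0])          # one row with the aggregated metrics of the run
print(len(gsm8k_details))  # number of evaluated GSM8K examples
```

To inspect a specific historical run instead of the latest one, pass the timestamped split name (for example "2024_02_15T22_34_32.769102") in place of "latest".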
open-llm-leaderboard/details_sonthenguyen__OpenHermes-2.5-Mistral-7B-mt-bench-DPO-reversed_corrupted
[ "region:us" ]
2024-02-15T22:36:52+00:00
{"pretty_name": "Evaluation run of sonthenguyen/OpenHermes-2.5-Mistral-7B-mt-bench-DPO-reversed_corrupted", "dataset_summary": "Dataset automatically created during the evaluation run of model [sonthenguyen/OpenHermes-2.5-Mistral-7B-mt-bench-DPO-reversed_corrupted](https://huggingface.co/sonthenguyen/OpenHermes-2.5-Mistral-7B-mt-bench-DPO-reversed_corrupted) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_sonthenguyen__OpenHermes-2.5-Mistral-7B-mt-bench-DPO-reversed_corrupted\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-15T22:34:32.769102](https://huggingface.co/datasets/open-llm-leaderboard/details_sonthenguyen__OpenHermes-2.5-Mistral-7B-mt-bench-DPO-reversed_corrupted/blob/main/results_2024-02-15T22-34-32.769102.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6363878047023346,\n \"acc_stderr\": 0.032296390142505176,\n \"acc_norm\": 0.6396525594831768,\n \"acc_norm_stderr\": 0.03293649951708016,\n \"mc1\": 0.3525091799265606,\n \"mc1_stderr\": 0.016724646380756547,\n \"mc2\": 0.5164963215954579,\n \"mc2_stderr\": 0.015210020803636122\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.60580204778157,\n \"acc_stderr\": 0.014280522667467325,\n \"acc_norm\": 0.64419795221843,\n \"acc_norm_stderr\": 0.01399057113791876\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6483768173670583,\n \"acc_stderr\": 0.004765012078929387,\n \"acc_norm\": 0.8394742083250348,\n \"acc_norm_stderr\": 0.003663427536178161\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04232073695151589,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04232073695151589\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6830188679245283,\n \"acc_stderr\": 0.0286372356398009,\n \"acc_norm\": 0.6830188679245283,\n \"acc_norm_stderr\": 0.0286372356398009\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 0.03586879280080341,\n \"acc_norm\": 
0.7569444444444444,\n \"acc_norm_stderr\": 0.03586879280080341\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6184971098265896,\n \"acc_stderr\": 0.03703851193099521,\n \"acc_norm\": 0.6184971098265896,\n \"acc_norm_stderr\": 0.03703851193099521\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5659574468085107,\n \"acc_stderr\": 0.03240038086792747,\n \"acc_norm\": 0.5659574468085107,\n \"acc_norm_stderr\": 0.03240038086792747\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5241379310344828,\n \"acc_stderr\": 0.0416180850350153,\n \"acc_norm\": 0.5241379310344828,\n \"acc_norm_stderr\": 0.0416180850350153\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.02548718714785938,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.02548718714785938\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4523809523809524,\n \"acc_stderr\": 0.044518079590553275,\n \"acc_norm\": 0.4523809523809524,\n \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7935483870967742,\n \"acc_stderr\": 0.02302589961718871,\n \"acc_norm\": 0.7935483870967742,\n \"acc_norm_stderr\": 0.02302589961718871\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.035158955511656986,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.035158955511656986\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.793939393939394,\n \"acc_stderr\": 0.031584153240477114,\n \"acc_norm\": 0.793939393939394,\n \"acc_norm_stderr\": 0.031584153240477114\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.797979797979798,\n \"acc_stderr\": 0.028606204289229865,\n \"acc_norm\": 0.797979797979798,\n \"acc_norm_stderr\": 0.028606204289229865\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8860103626943006,\n \"acc_stderr\": 
0.022935144053919443,\n \"acc_norm\": 0.8860103626943006,\n \"acc_norm_stderr\": 0.022935144053919443\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6102564102564103,\n \"acc_stderr\": 0.024726967886647074,\n \"acc_norm\": 0.6102564102564103,\n \"acc_norm_stderr\": 0.024726967886647074\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.0279404571362284,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.0279404571362284\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6722689075630253,\n \"acc_stderr\": 0.03048991141767323,\n \"acc_norm\": 0.6722689075630253,\n \"acc_norm_stderr\": 0.03048991141767323\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8348623853211009,\n \"acc_stderr\": 0.01591955782997604,\n \"acc_norm\": 0.8348623853211009,\n \"acc_norm_stderr\": 0.01591955782997604\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5092592592592593,\n \"acc_stderr\": 0.034093869469927006,\n \"acc_norm\": 0.5092592592592593,\n \"acc_norm_stderr\": 0.034093869469927006\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7990196078431373,\n \"acc_stderr\": 0.02812597226565437,\n \"acc_norm\": 0.7990196078431373,\n \"acc_norm_stderr\": 0.02812597226565437\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.810126582278481,\n \"acc_stderr\": 0.02553010046023349,\n \"acc_norm\": 0.810126582278481,\n \"acc_norm_stderr\": 0.02553010046023349\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477518,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477518\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7938931297709924,\n \"acc_stderr\": 0.035477710041594654,\n \"acc_norm\": 0.7938931297709924,\n \"acc_norm_stderr\": 0.035477710041594654\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7603305785123967,\n \"acc_stderr\": 0.03896878985070416,\n \"acc_norm\": 0.7603305785123967,\n \"acc_norm_stderr\": 0.03896878985070416\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.0401910747255735,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.0401910747255735\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7852760736196319,\n \"acc_stderr\": 0.032262193772867744,\n \"acc_norm\": 0.7852760736196319,\n \"acc_norm_stderr\": 0.032262193772867744\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5178571428571429,\n \"acc_stderr\": 0.04742762361243011,\n \"acc_norm\": 0.5178571428571429,\n \"acc_norm_stderr\": 0.04742762361243011\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n \"acc_stderr\": 0.022801382534597528,\n \"acc_norm\": 0.8589743589743589,\n \"acc_norm_stderr\": 0.022801382534597528\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 
0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8250319284802043,\n \"acc_stderr\": 0.013586619219903338,\n \"acc_norm\": 0.8250319284802043,\n \"acc_norm_stderr\": 0.013586619219903338\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7167630057803468,\n \"acc_stderr\": 0.02425790170532338,\n \"acc_norm\": 0.7167630057803468,\n \"acc_norm_stderr\": 0.02425790170532338\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.30837988826815643,\n \"acc_stderr\": 0.01544571691099888,\n \"acc_norm\": 0.30837988826815643,\n \"acc_norm_stderr\": 0.01544571691099888\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7516339869281046,\n \"acc_stderr\": 0.02473998135511359,\n \"acc_norm\": 0.7516339869281046,\n \"acc_norm_stderr\": 0.02473998135511359\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6816720257234726,\n \"acc_stderr\": 0.026457225067811025,\n \"acc_norm\": 0.6816720257234726,\n \"acc_norm_stderr\": 0.026457225067811025\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7530864197530864,\n \"acc_stderr\": 0.02399350170904211,\n \"acc_norm\": 0.7530864197530864,\n \"acc_norm_stderr\": 0.02399350170904211\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5035460992907801,\n \"acc_stderr\": 0.02982674915328092,\n \"acc_norm\": 0.5035460992907801,\n \"acc_norm_stderr\": 0.02982674915328092\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46870925684485004,\n \"acc_stderr\": 0.012745204626083131,\n \"acc_norm\": 0.46870925684485004,\n \"acc_norm_stderr\": 0.012745204626083131\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6727941176470589,\n \"acc_stderr\": 0.02850145286039655,\n \"acc_norm\": 0.6727941176470589,\n \"acc_norm_stderr\": 0.02850145286039655\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.673202614379085,\n \"acc_stderr\": 0.01897542792050721,\n \"acc_norm\": 0.673202614379085,\n \"acc_norm_stderr\": 0.01897542792050721\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8159203980099502,\n \"acc_stderr\": 0.027403859410786845,\n \"acc_norm\": 0.8159203980099502,\n \"acc_norm_stderr\": 0.027403859410786845\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3525091799265606,\n \"mc1_stderr\": 0.016724646380756547,\n \"mc2\": 0.5164963215954579,\n \"mc2_stderr\": 0.015210020803636122\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7774269928966061,\n \"acc_stderr\": 0.011690933809712667\n },\n 
\"harness|gsm8k|5\": {\n \"acc\": 0.5322213798332069,\n \"acc_stderr\": 0.013743857303073797\n }\n}\n```", "repo_url": "https://huggingface.co/sonthenguyen/OpenHermes-2.5-Mistral-7B-mt-bench-DPO-reversed_corrupted", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|arc:challenge|25_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|gsm8k|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hellaswag|10_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T22-34-32.769102.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-15T22-34-32.769102.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T22-34-32.769102.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-15T22-34-32.769102.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T22-34-32.769102.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["**/details_harness|winogrande|5_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-02-15T22-34-32.769102.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_15T22_34_32.769102", "path": ["results_2024-02-15T22-34-32.769102.parquet"]}, {"split": "latest", "path": ["results_2024-02-15T22-34-32.769102.parquet"]}]}]}
2024-02-15T22:37:18+00:00
58c6886f62bf6f4139598db460db5bb4693048ff
# Dataset Card for Evaluation run of sonthenguyen/OpenHermes-2.5-Mistral-7B-mt-bench-DPO-original-v2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [sonthenguyen/OpenHermes-2.5-Mistral-7B-mt-bench-DPO-original-v2](https://huggingface.co/sonthenguyen/OpenHermes-2.5-Mistral-7B-mt-bench-DPO-original-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). A usage sketch for loading this "results" configuration is given at the end of this card. To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_sonthenguyen__OpenHermes-2.5-Mistral-7B-mt-bench-DPO-original-v2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-15T22:39:42.033476](https://huggingface.co/datasets/open-llm-leaderboard/details_sonthenguyen__OpenHermes-2.5-Mistral-7B-mt-bench-DPO-original-v2/blob/main/results_2024-02-15T22-39-42.033476.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6371686329291266, "acc_stderr": 0.03229837388268866, "acc_norm": 0.6400332159863307, "acc_norm_stderr": 0.03293928846555229, "mc1": 0.35862913096695226, "mc1_stderr": 0.016789289499502022, "mc2": 0.5239619444151867, "mc2_stderr": 0.015249337191329784 }, "harness|arc:challenge|25": { "acc": 0.6126279863481229, "acc_stderr": 0.014235872487909869, "acc_norm": 0.6493174061433447, "acc_norm_stderr": 0.013944635930726099 }, "harness|hellaswag|10": { "acc": 0.6542521410077674, "acc_stderr": 0.0047463946133845325, "acc_norm": 0.8454491137223661, "acc_norm_stderr": 0.0036073726062950894 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6074074074074074, "acc_stderr": 0.0421850621536888, "acc_norm": 0.6074074074074074, "acc_norm_stderr": 0.0421850621536888 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7039473684210527, "acc_stderr": 0.03715062154998904, "acc_norm": 0.7039473684210527, "acc_norm_stderr": 0.03715062154998904 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6830188679245283, "acc_stderr": 0.028637235639800893, "acc_norm": 0.6830188679245283, "acc_norm_stderr": 0.028637235639800893 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7569444444444444, "acc_stderr": 0.03586879280080341, "acc_norm": 0.7569444444444444, "acc_norm_stderr": 0.03586879280080341 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, 
"harness|hendrycksTest-college_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6242774566473989, "acc_stderr": 0.036928207672648664, "acc_norm": 0.6242774566473989, "acc_norm_stderr": 0.036928207672648664 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.548936170212766, "acc_stderr": 0.032529096196131965, "acc_norm": 0.548936170212766, "acc_norm_stderr": 0.032529096196131965 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5310344827586206, "acc_stderr": 0.04158632762097828, "acc_norm": 0.5310344827586206, "acc_norm_stderr": 0.04158632762097828 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42592592592592593, "acc_stderr": 0.02546714904546955, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.02546714904546955 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7870967741935484, "acc_stderr": 0.023287665127268552, "acc_norm": 0.7870967741935484, "acc_norm_stderr": 0.023287665127268552 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5221674876847291, "acc_stderr": 0.03514528562175007, "acc_norm": 0.5221674876847291, "acc_norm_stderr": 0.03514528562175007 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.032250781083062896, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.032250781083062896 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.803030303030303, "acc_stderr": 0.02833560973246336, "acc_norm": 0.803030303030303, "acc_norm_stderr": 0.02833560973246336 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8860103626943006, "acc_stderr": 0.022935144053919443, "acc_norm": 0.8860103626943006, "acc_norm_stderr": 0.022935144053919443 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6153846153846154, "acc_stderr": 0.024666744915187208, "acc_norm": 0.6153846153846154, "acc_norm_stderr": 0.024666744915187208 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3, "acc_stderr": 0.027940457136228405, "acc_norm": 0.3, "acc_norm_stderr": 0.027940457136228405 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6764705882352942, "acc_stderr": 0.030388353551886797, "acc_norm": 0.6764705882352942, 
"acc_norm_stderr": 0.030388353551886797 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.038020397601079024, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.038020397601079024 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8330275229357799, "acc_stderr": 0.01599015488507338, "acc_norm": 0.8330275229357799, "acc_norm_stderr": 0.01599015488507338 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5138888888888888, "acc_stderr": 0.034086558679777494, "acc_norm": 0.5138888888888888, "acc_norm_stderr": 0.034086558679777494 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7990196078431373, "acc_stderr": 0.02812597226565437, "acc_norm": 0.7990196078431373, "acc_norm_stderr": 0.02812597226565437 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8185654008438819, "acc_stderr": 0.02508596114457966, "acc_norm": 0.8185654008438819, "acc_norm_stderr": 0.02508596114457966 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7040358744394619, "acc_stderr": 0.030636591348699803, "acc_norm": 0.7040358744394619, "acc_norm_stderr": 0.030636591348699803 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7603305785123967, "acc_stderr": 0.03896878985070416, "acc_norm": 0.7603305785123967, "acc_norm_stderr": 0.03896878985070416 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7870370370370371, "acc_stderr": 0.039578354719809805, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.039578354719809805 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7791411042944786, "acc_stderr": 0.03259177392742178, "acc_norm": 0.7791411042944786, "acc_norm_stderr": 0.03259177392742178 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.49107142857142855, "acc_stderr": 0.04745033255489123, "acc_norm": 0.49107142857142855, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8589743589743589, "acc_stderr": 0.022801382534597528, "acc_norm": 0.8589743589743589, "acc_norm_stderr": 0.022801382534597528 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8275862068965517, "acc_stderr": 0.013507943909371803, "acc_norm": 0.8275862068965517, "acc_norm_stderr": 0.013507943909371803 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7138728323699421, "acc_stderr": 0.024332146779134128, "acc_norm": 0.7138728323699421, "acc_norm_stderr": 0.024332146779134128 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3094972067039106, "acc_stderr": 0.015461169002371542, "acc_norm": 0.3094972067039106, "acc_norm_stderr": 0.015461169002371542 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7549019607843137, "acc_stderr": 0.024630048979824782, "acc_norm": 0.7549019607843137, "acc_norm_stderr": 0.024630048979824782 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6881028938906752, "acc_stderr": 0.02631185807185416, "acc_norm": 0.6881028938906752, "acc_norm_stderr": 0.02631185807185416 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7530864197530864, 
"acc_stderr": 0.02399350170904211, "acc_norm": 0.7530864197530864, "acc_norm_stderr": 0.02399350170904211 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5035460992907801, "acc_stderr": 0.02982674915328092, "acc_norm": 0.5035460992907801, "acc_norm_stderr": 0.02982674915328092 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4661016949152542, "acc_stderr": 0.01274085387294983, "acc_norm": 0.4661016949152542, "acc_norm_stderr": 0.01274085387294983 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6801470588235294, "acc_stderr": 0.028332959514031204, "acc_norm": 0.6801470588235294, "acc_norm_stderr": 0.028332959514031204 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6699346405228758, "acc_stderr": 0.019023726160724553, "acc_norm": 0.6699346405228758, "acc_norm_stderr": 0.019023726160724553 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.045820048415054174, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.045820048415054174 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7387755102040816, "acc_stderr": 0.028123429335142773, "acc_norm": 0.7387755102040816, "acc_norm_stderr": 0.028123429335142773 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8159203980099502, "acc_stderr": 0.027403859410786845, "acc_norm": 0.8159203980099502, "acc_norm_stderr": 0.027403859410786845 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.033799766898963086, "acc_norm": 0.87, "acc_norm_stderr": 0.033799766898963086 }, "harness|hendrycksTest-virology|5": { "acc": 0.5662650602409639, "acc_stderr": 0.03858158940685516, "acc_norm": 0.5662650602409639, "acc_norm_stderr": 0.03858158940685516 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.35862913096695226, "mc1_stderr": 0.016789289499502022, "mc2": 0.5239619444151867, "mc2_stderr": 0.015249337191329784 }, "harness|winogrande|5": { "acc": 0.7790055248618785, "acc_stderr": 0.011661223637643412 }, "harness|gsm8k|5": { "acc": 0.55420773313116, "acc_stderr": 0.01369130517450669 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
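As a usage note complementing the loading example near the top of this card, the aggregated metrics shown under "Latest results" can also be loaded directly from the "results" configuration. The snippet below is a minimal sketch: it assumes only the "results" config and the "latest" split declared in this repository's metadata, and simply prints the first row so the schema can be inspected.

```python
from datasets import load_dataset

# Load the aggregated results of the most recent evaluation run.
# The "results" config exposes one split per run timestamp plus a "latest" split.
results = load_dataset(
    "open-llm-leaderboard/details_sonthenguyen__OpenHermes-2.5-Mistral-7B-mt-bench-DPO-original-v2",
    "results",
    split="latest",
)

# Each row holds the aggregated metrics for one run; print the first row to inspect it.
print(results[0])
```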
open-llm-leaderboard/details_sonthenguyen__OpenHermes-2.5-Mistral-7B-mt-bench-DPO-original-v2
[ "region:us" ]
2024-02-15T22:41:58+00:00
{"pretty_name": "Evaluation run of sonthenguyen/OpenHermes-2.5-Mistral-7B-mt-bench-DPO-original-v2", "dataset_summary": "Dataset automatically created during the evaluation run of model [sonthenguyen/OpenHermes-2.5-Mistral-7B-mt-bench-DPO-original-v2](https://huggingface.co/sonthenguyen/OpenHermes-2.5-Mistral-7B-mt-bench-DPO-original-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_sonthenguyen__OpenHermes-2.5-Mistral-7B-mt-bench-DPO-original-v2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-15T22:39:42.033476](https://huggingface.co/datasets/open-llm-leaderboard/details_sonthenguyen__OpenHermes-2.5-Mistral-7B-mt-bench-DPO-original-v2/blob/main/results_2024-02-15T22-39-42.033476.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6371686329291266,\n \"acc_stderr\": 0.03229837388268866,\n \"acc_norm\": 0.6400332159863307,\n \"acc_norm_stderr\": 0.03293928846555229,\n \"mc1\": 0.35862913096695226,\n \"mc1_stderr\": 0.016789289499502022,\n \"mc2\": 0.5239619444151867,\n \"mc2_stderr\": 0.015249337191329784\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6126279863481229,\n \"acc_stderr\": 0.014235872487909869,\n \"acc_norm\": 0.6493174061433447,\n \"acc_norm_stderr\": 0.013944635930726099\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6542521410077674,\n \"acc_stderr\": 0.0047463946133845325,\n \"acc_norm\": 0.8454491137223661,\n \"acc_norm_stderr\": 0.0036073726062950894\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n \"acc_stderr\": 0.0421850621536888,\n \"acc_norm\": 0.6074074074074074,\n \"acc_norm_stderr\": 0.0421850621536888\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6830188679245283,\n \"acc_stderr\": 0.028637235639800893,\n \"acc_norm\": 0.6830188679245283,\n \"acc_norm_stderr\": 0.028637235639800893\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 0.03586879280080341,\n \"acc_norm\": 
0.7569444444444444,\n \"acc_norm_stderr\": 0.03586879280080341\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6242774566473989,\n \"acc_stderr\": 0.036928207672648664,\n \"acc_norm\": 0.6242774566473989,\n \"acc_norm_stderr\": 0.036928207672648664\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.548936170212766,\n \"acc_stderr\": 0.032529096196131965,\n \"acc_norm\": 0.548936170212766,\n \"acc_norm_stderr\": 0.032529096196131965\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5310344827586206,\n \"acc_stderr\": 0.04158632762097828,\n \"acc_norm\": 0.5310344827586206,\n \"acc_norm_stderr\": 0.04158632762097828\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42592592592592593,\n \"acc_stderr\": 0.02546714904546955,\n \"acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.02546714904546955\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7870967741935484,\n \"acc_stderr\": 0.023287665127268552,\n \"acc_norm\": 0.7870967741935484,\n \"acc_norm_stderr\": 0.023287665127268552\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5221674876847291,\n \"acc_stderr\": 0.03514528562175007,\n \"acc_norm\": 0.5221674876847291,\n \"acc_norm_stderr\": 0.03514528562175007\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.032250781083062896,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.032250781083062896\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.803030303030303,\n \"acc_stderr\": 0.02833560973246336,\n \"acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.02833560973246336\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8860103626943006,\n \"acc_stderr\": 
0.022935144053919443,\n \"acc_norm\": 0.8860103626943006,\n \"acc_norm_stderr\": 0.022935144053919443\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6153846153846154,\n \"acc_stderr\": 0.024666744915187208,\n \"acc_norm\": 0.6153846153846154,\n \"acc_norm_stderr\": 0.024666744915187208\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.027940457136228405,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.027940457136228405\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.030388353551886797,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.030388353551886797\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8330275229357799,\n \"acc_stderr\": 0.01599015488507338,\n \"acc_norm\": 0.8330275229357799,\n \"acc_norm_stderr\": 0.01599015488507338\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5138888888888888,\n \"acc_stderr\": 0.034086558679777494,\n \"acc_norm\": 0.5138888888888888,\n \"acc_norm_stderr\": 0.034086558679777494\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7990196078431373,\n \"acc_stderr\": 0.02812597226565437,\n \"acc_norm\": 0.7990196078431373,\n \"acc_norm_stderr\": 0.02812597226565437\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8185654008438819,\n \"acc_stderr\": 0.02508596114457966,\n \"acc_norm\": 0.8185654008438819,\n \"acc_norm_stderr\": 0.02508596114457966\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7040358744394619,\n \"acc_stderr\": 0.030636591348699803,\n \"acc_norm\": 0.7040358744394619,\n \"acc_norm_stderr\": 0.030636591348699803\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7603305785123967,\n \"acc_stderr\": 0.03896878985070416,\n \"acc_norm\": 0.7603305785123967,\n \"acc_norm_stderr\": 0.03896878985070416\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.039578354719809805,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.039578354719809805\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742178,\n \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742178\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.49107142857142855,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.49107142857142855,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n \"acc_stderr\": 0.022801382534597528,\n \"acc_norm\": 0.8589743589743589,\n \"acc_norm_stderr\": 0.022801382534597528\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 
0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8275862068965517,\n \"acc_stderr\": 0.013507943909371803,\n \"acc_norm\": 0.8275862068965517,\n \"acc_norm_stderr\": 0.013507943909371803\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7138728323699421,\n \"acc_stderr\": 0.024332146779134128,\n \"acc_norm\": 0.7138728323699421,\n \"acc_norm_stderr\": 0.024332146779134128\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3094972067039106,\n \"acc_stderr\": 0.015461169002371542,\n \"acc_norm\": 0.3094972067039106,\n \"acc_norm_stderr\": 0.015461169002371542\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.024630048979824782,\n \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.024630048979824782\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6881028938906752,\n \"acc_stderr\": 0.02631185807185416,\n \"acc_norm\": 0.6881028938906752,\n \"acc_norm_stderr\": 0.02631185807185416\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7530864197530864,\n \"acc_stderr\": 0.02399350170904211,\n \"acc_norm\": 0.7530864197530864,\n \"acc_norm_stderr\": 0.02399350170904211\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5035460992907801,\n \"acc_stderr\": 0.02982674915328092,\n \"acc_norm\": 0.5035460992907801,\n \"acc_norm_stderr\": 0.02982674915328092\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4661016949152542,\n \"acc_stderr\": 0.01274085387294983,\n \"acc_norm\": 0.4661016949152542,\n \"acc_norm_stderr\": 0.01274085387294983\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6801470588235294,\n \"acc_stderr\": 0.028332959514031204,\n \"acc_norm\": 0.6801470588235294,\n \"acc_norm_stderr\": 0.028332959514031204\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6699346405228758,\n \"acc_stderr\": 0.019023726160724553,\n \"acc_norm\": 0.6699346405228758,\n \"acc_norm_stderr\": 0.019023726160724553\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.028123429335142773,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.028123429335142773\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8159203980099502,\n \"acc_stderr\": 0.027403859410786845,\n \"acc_norm\": 0.8159203980099502,\n \"acc_norm_stderr\": 0.027403859410786845\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5662650602409639,\n \"acc_stderr\": 0.03858158940685516,\n \"acc_norm\": 0.5662650602409639,\n \"acc_norm_stderr\": 0.03858158940685516\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.35862913096695226,\n \"mc1_stderr\": 0.016789289499502022,\n \"mc2\": 0.5239619444151867,\n \"mc2_stderr\": 0.015249337191329784\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7790055248618785,\n \"acc_stderr\": 0.011661223637643412\n },\n 
\"harness|gsm8k|5\": {\n \"acc\": 0.55420773313116,\n \"acc_stderr\": 0.01369130517450669\n }\n}\n```", "repo_url": "https://huggingface.co/sonthenguyen/OpenHermes-2.5-Mistral-7B-mt-bench-DPO-original-v2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|arc:challenge|25_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|gsm8k|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hellaswag|10_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T22-39-42.033476.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-15T22-39-42.033476.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T22-39-42.033476.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-15T22-39-42.033476.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T22-39-42.033476.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["**/details_harness|winogrande|5_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-02-15T22-39-42.033476.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_15T22_39_42.033476", "path": ["results_2024-02-15T22-39-42.033476.parquet"]}, {"split": "latest", "path": ["results_2024-02-15T22-39-42.033476.parquet"]}]}]}
2024-02-15T22:42:22+00:00
18ca6e2c9c4f066c5e68020611e7e2f018f811ae
iNeil77/SLTrans
[ "task_categories:text-generation", "size_categories:1M<n<10M", "license:cc-by-nc-sa-4.0", "code", "region:us" ]
2024-02-15T22:48:03+00:00
{"license": "cc-by-nc-sa-4.0", "size_categories": ["1M<n<10M"], "task_categories": ["text-generation"], "tags": ["code"], "extra_gated_prompt": "You agree to not use the model to conduct experiments that cause harm to human subjects or generate malicious code.", "extra_gated_fields": {"Company": "text", "Country": "country", "Specific date": "date_picker", "I want to use this model for": {"type": "select", "options": ["Research", "Education", {"label": "Other", "value": "other"}]}, "I agree to use this model for non-commercial use ONLY": "checkbox"}, "dataset_info": [{"config_name": "C", "features": [{"name": "Source_Code", "dtype": "string"}, {"name": "IR_Original", "dtype": "string"}], "splits": [{"name": "Perf_Optimized", "num_bytes": 3383884149, "num_examples": 341419}, {"name": "Size_Optimized", "num_bytes": 2528286566, "num_examples": 341785}], "download_size": 1323447636, "dataset_size": 5912170715}, {"config_name": "C++", "features": [{"name": "Source_Code", "dtype": "string"}, {"name": "IR_Original", "dtype": "string"}], "splits": [{"name": "Perf_Optimized", "num_bytes": 116351369851, "num_examples": 2898509}, {"name": "Size_Optimized", "num_bytes": 92572469724, "num_examples": 2916655}], "download_size": 51690627847, "dataset_size": 208923839575}, {"config_name": "D", "features": [{"name": "Source_Code", "dtype": "string"}, {"name": "IR_Original", "dtype": "string"}], "splits": [{"name": "Perf_Optimized", "num_bytes": 2320830137, "num_examples": 7000}, {"name": "Size_Optimized", "num_bytes": 3271276765, "num_examples": 11054}], "download_size": 1316382832, "dataset_size": 5592106902}, {"config_name": "Fortran", "features": [{"name": "Source_Code", "dtype": "string"}, {"name": "IR_Original", "dtype": "string"}], "splits": [{"name": "Perf_Optimized", "num_bytes": 357741835, "num_examples": 6327}, {"name": "Size_Optimized", "num_bytes": 2320830137, "num_examples": 7000}], "download_size": 563853972, "dataset_size": 2678571972}, {"config_name": "Go", "features": [{"name": "Source_Code", "dtype": "string"}, {"name": "IR_Original", "dtype": "string"}], "splits": [{"name": "Perf_Optimized", "num_bytes": 819560767, "num_examples": 3913}, {"name": "Size_Optimized", "num_bytes": 741733997, "num_examples": 3925}], "download_size": 317182680, "dataset_size": 1561294764}, {"config_name": "Haskell", "features": [{"name": "Source_Code", "dtype": "string"}, {"name": "IR_Original", "dtype": "string"}], "splits": [{"name": "Perf_Optimized", "num_bytes": 3838556743, "num_examples": 27892}, {"name": "Size_Optimized", "num_bytes": 3667186152, "num_examples": 28203}], "download_size": 1736729352, "dataset_size": 7505742895}, {"config_name": "Nim", "features": [{"name": "Source_Code", "dtype": "string"}, {"name": "IR_Original", "dtype": "string"}], "splits": [{"name": "Size_Optimized", "num_bytes": 106424381, "num_examples": 215}], "download_size": 22506456, "dataset_size": 106424381}, {"config_name": "Objective-C", "features": [{"name": "Source_Code", "dtype": "string"}, {"name": "IR_Original", "dtype": "string"}], "splits": [{"name": "Perf_Optimized", "num_bytes": 1729045, "num_examples": 283}, {"name": "Size_Optimized", "num_bytes": 1433377, "num_examples": 283}], "download_size": 707508, "dataset_size": 3162422}, {"config_name": "Python", "features": [{"name": "Source_Code", "dtype": "string"}, {"name": "IR_Original", "dtype": "string"}], "splits": [{"name": "Perf_Optimized", "num_bytes": 13118428652, "num_examples": 154507}, {"name": "Size_Optimized", "num_bytes": 13118428652, "num_examples": 
154507}], "download_size": 6511950536, "dataset_size": 26236857304}, {"config_name": "Rust", "features": [{"name": "Source_Code", "dtype": "string"}, {"name": "IR_Original", "dtype": "string"}], "splits": [{"name": "Perf_Optimized", "num_bytes": 5859467468, "num_examples": 38323}, {"name": "Size_Optimized", "num_bytes": 8695405064, "num_examples": 32720}], "download_size": 5326634011, "dataset_size": 14554872532}, {"config_name": "Swift", "features": [{"name": "Source_Code", "dtype": "string"}, {"name": "IR_Original", "dtype": "string"}], "splits": [{"name": "Perf_Optimized", "num_bytes": 260013963, "num_examples": 2003}, {"name": "Size_Optimized", "num_bytes": 266356839, "num_examples": 2015}], "download_size": 144113584, "dataset_size": 526370802}], "configs": [{"config_name": "C", "data_files": [{"split": "Perf_Optimized", "path": "C/Perf_Optimized-*"}, {"split": "Size_Optimized", "path": "C/Size_Optimized-*"}]}, {"config_name": "C++", "data_files": [{"split": "Perf_Optimized", "path": "C++/Perf_Optimized-*"}, {"split": "Size_Optimized", "path": "C++/Size_Optimized-*"}]}, {"config_name": "D", "data_files": [{"split": "Perf_Optimized", "path": "D/Perf_Optimized-*"}, {"split": "Size_Optimized", "path": "D/Size_Optimized-*"}]}, {"config_name": "Fortran", "data_files": [{"split": "Perf_Optimized", "path": "Fortran/Perf_Optimized-*"}, {"split": "Size_Optimized", "path": "Fortran/Size_Optimized-*"}]}, {"config_name": "Go", "data_files": [{"split": "Perf_Optimized", "path": "Go/Perf_Optimized-*"}, {"split": "Size_Optimized", "path": "Go/Size_Optimized-*"}]}, {"config_name": "Haskell", "data_files": [{"split": "Perf_Optimized", "path": "Haskell/Perf_Optimized-*"}, {"split": "Size_Optimized", "path": "Haskell/Size_Optimized-*"}]}, {"config_name": "Nim", "data_files": [{"split": "Size_Optimized", "path": "Nim/Size_Optimized-*"}]}, {"config_name": "Objective-C", "data_files": [{"split": "Perf_Optimized", "path": "Objective-C/Perf_Optimized-*"}, {"split": "Size_Optimized", "path": "Objective-C/Size_Optimized-*"}]}, {"config_name": "Python", "data_files": [{"split": "Perf_Optimized", "path": "Python/Perf_Optimized-*"}, {"split": "Size_Optimized", "path": "Python/Size_Optimized-*"}]}, {"config_name": "Rust", "data_files": [{"split": "Perf_Optimized", "path": "Rust/Perf_Optimized-*"}, {"split": "Size_Optimized", "path": "Rust/Size_Optimized-*"}]}, {"config_name": "Swift", "data_files": [{"split": "Perf_Optimized", "path": "Swift/Perf_Optimized-*"}, {"split": "Size_Optimized", "path": "Swift/Size_Optimized-*"}]}]}
2024-02-16T01:02:34+00:00
51831028aba9e7579e7314ddeed814fea75af7b2
eagle0504/larkin-web-scrape-dataset-qa-formatted
[ "region:us" ]
2024-02-15T23:02:44+00:00
{"dataset_info": {"features": [{"name": "questions", "dtype": "string"}, {"name": "answers", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 115322, "num_examples": 568}], "download_size": 62490, "dataset_size": 115322}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-15T23:02:45+00:00
e998248084186177e8803376dbcf3e8967925bc3
# Dataset Card for Evaluation run of Kquant03/Buttercup-V2-bf16 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Kquant03/Buttercup-V2-bf16](https://huggingface.co/Kquant03/Buttercup-V2-bf16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Kquant03__Buttercup-V2-bf16", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-15T23:01:38.445097](https://huggingface.co/datasets/open-llm-leaderboard/details_Kquant03__Buttercup-V2-bf16/blob/main/results_2024-02-15T23-01-38.445097.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6530538161665846, "acc_stderr": 0.031991249389942286, "acc_norm": 0.6524163789680969, "acc_norm_stderr": 0.03266539743840073, "mc1": 0.554467564259486, "mc1_stderr": 0.01739933528014034, "mc2": 0.6947306262348207, "mc2_stderr": 0.015031157853542046 }, "harness|arc:challenge|25": { "acc": 0.7039249146757679, "acc_stderr": 0.013340916085246258, "acc_norm": 0.7372013651877133, "acc_norm_stderr": 0.012862523175351335 }, "harness|hellaswag|10": { "acc": 0.7112129057956582, "acc_stderr": 0.004522725412556955, "acc_norm": 0.885381398127863, "acc_norm_stderr": 0.003179100565887989 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6518518518518519, "acc_stderr": 0.041153246103369526, "acc_norm": 0.6518518518518519, "acc_norm_stderr": 0.041153246103369526 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7039473684210527, "acc_stderr": 0.03715062154998904, "acc_norm": 0.7039473684210527, "acc_norm_stderr": 0.03715062154998904 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7018867924528301, "acc_stderr": 0.02815283794249387, "acc_norm": 0.7018867924528301, "acc_norm_stderr": 0.02815283794249387 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 
0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6647398843930635, "acc_stderr": 0.03599586301247077, "acc_norm": 0.6647398843930635, "acc_norm_stderr": 0.03599586301247077 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.048971049527263666, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.048971049527263666 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816506, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5617021276595745, "acc_stderr": 0.03243618636108101, "acc_norm": 0.5617021276595745, "acc_norm_stderr": 0.03243618636108101 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5087719298245614, "acc_stderr": 0.04702880432049615, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370333, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370333 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4470899470899471, "acc_stderr": 0.025606723995777025, "acc_norm": 0.4470899470899471, "acc_norm_stderr": 0.025606723995777025 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.47619047619047616, "acc_stderr": 0.04467062628403273, "acc_norm": 0.47619047619047616, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7870967741935484, "acc_stderr": 0.02328766512726854, "acc_norm": 0.7870967741935484, "acc_norm_stderr": 0.02328766512726854 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5073891625615764, "acc_stderr": 0.035176035403610105, "acc_norm": 0.5073891625615764, "acc_norm_stderr": 0.035176035403610105 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8181818181818182, "acc_stderr": 0.027479603010538797, "acc_norm": 0.8181818181818182, "acc_norm_stderr": 0.027479603010538797 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.021500249576033456, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.021500249576033456 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6615384615384615, "acc_stderr": 0.023991500500313036, "acc_norm": 0.6615384615384615, "acc_norm_stderr": 0.023991500500313036 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.029116617606083008, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.029116617606083008 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6890756302521008, "acc_stderr": 0.03006676158297794, "acc_norm": 0.6890756302521008, "acc_norm_stderr": 0.03006676158297794 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.36423841059602646, "acc_stderr": 0.03929111781242742, "acc_norm": 0.36423841059602646, "acc_norm_stderr": 
0.03929111781242742 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8422018348623853, "acc_stderr": 0.015630022970092448, "acc_norm": 0.8422018348623853, "acc_norm_stderr": 0.015630022970092448 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5, "acc_stderr": 0.034099716973523674, "acc_norm": 0.5, "acc_norm_stderr": 0.034099716973523674 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8333333333333334, "acc_stderr": 0.026156867523931045, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.026156867523931045 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8016877637130801, "acc_stderr": 0.02595502084162113, "acc_norm": 0.8016877637130801, "acc_norm_stderr": 0.02595502084162113 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.031146796482972465, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7938931297709924, "acc_stderr": 0.03547771004159463, "acc_norm": 0.7938931297709924, "acc_norm_stderr": 0.03547771004159463 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990946, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990946 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7592592592592593, "acc_stderr": 0.04133119440243839, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.04133119440243839 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.033519538795212696, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.033519538795212696 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4017857142857143, "acc_stderr": 0.04653333146973646, "acc_norm": 0.4017857142857143, "acc_norm_stderr": 0.04653333146973646 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8888888888888888, "acc_stderr": 0.020588491316092368, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.020588491316092368 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8339719029374202, "acc_stderr": 0.0133064782430663, "acc_norm": 0.8339719029374202, "acc_norm_stderr": 0.0133064782430663 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7341040462427746, "acc_stderr": 0.02378620325550829, "acc_norm": 0.7341040462427746, "acc_norm_stderr": 0.02378620325550829 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.42793296089385474, "acc_stderr": 0.01654788799741611, "acc_norm": 0.42793296089385474, "acc_norm_stderr": 0.01654788799741611 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7189542483660131, "acc_stderr": 0.025738854797818733, "acc_norm": 0.7189542483660131, "acc_norm_stderr": 0.025738854797818733 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7170418006430869, "acc_stderr": 0.02558306248998481, "acc_norm": 0.7170418006430869, "acc_norm_stderr": 0.02558306248998481 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7530864197530864, "acc_stderr": 0.023993501709042103, "acc_norm": 0.7530864197530864, "acc_norm_stderr": 0.023993501709042103 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, "acc_stderr": 0.02982074719142248, "acc_norm": 
0.48936170212765956, "acc_norm_stderr": 0.02982074719142248 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46740547588005216, "acc_stderr": 0.01274307294265335, "acc_norm": 0.46740547588005216, "acc_norm_stderr": 0.01274307294265335 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6838235294117647, "acc_stderr": 0.028245687391462923, "acc_norm": 0.6838235294117647, "acc_norm_stderr": 0.028245687391462923 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6633986928104575, "acc_stderr": 0.019117213911495148, "acc_norm": 0.6633986928104575, "acc_norm_stderr": 0.019117213911495148 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.726530612244898, "acc_stderr": 0.028535560337128448, "acc_norm": 0.726530612244898, "acc_norm_stderr": 0.028535560337128448 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454115, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454115 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.554467564259486, "mc1_stderr": 0.01739933528014034, "mc2": 0.6947306262348207, "mc2_stderr": 0.015031157853542046 }, "harness|winogrande|5": { "acc": 0.8650355169692187, "acc_stderr": 0.009603064913219049 }, "harness|gsm8k|5": { "acc": 0.686125852918878, "acc_stderr": 0.012782681251053198 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
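For the aggregated metrics themselves, a minimal sketch along the same lines (assuming the "results" configuration and the "latest" split declared in this repo's configs) would be:

```python
from datasets import load_dataset

# Pull the aggregated run results rather than one task's per-example details.
# "results" is the extra configuration mentioned in the summary above and
# "latest" is the split that tracks the most recent evaluation run.
results = load_dataset(
    "open-llm-leaderboard/details_Kquant03__Buttercup-V2-bf16",
    "results",
    split="latest",
)

# Each row should hold the aggregated metrics for one evaluation run.
print(results[0])
```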
open-llm-leaderboard/details_Kquant03__Buttercup-V2-bf16
[ "region:us" ]
2024-02-15T23:03:54+00:00
{"pretty_name": "Evaluation run of Kquant03/Buttercup-V2-bf16", "dataset_summary": "Dataset automatically created during the evaluation run of model [Kquant03/Buttercup-V2-bf16](https://huggingface.co/Kquant03/Buttercup-V2-bf16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Kquant03__Buttercup-V2-bf16\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-15T23:01:38.445097](https://huggingface.co/datasets/open-llm-leaderboard/details_Kquant03__Buttercup-V2-bf16/blob/main/results_2024-02-15T23-01-38.445097.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6530538161665846,\n \"acc_stderr\": 0.031991249389942286,\n \"acc_norm\": 0.6524163789680969,\n \"acc_norm_stderr\": 0.03266539743840073,\n \"mc1\": 0.554467564259486,\n \"mc1_stderr\": 0.01739933528014034,\n \"mc2\": 0.6947306262348207,\n \"mc2_stderr\": 0.015031157853542046\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7039249146757679,\n \"acc_stderr\": 0.013340916085246258,\n \"acc_norm\": 0.7372013651877133,\n \"acc_norm_stderr\": 0.012862523175351335\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7112129057956582,\n \"acc_stderr\": 0.004522725412556955,\n \"acc_norm\": 0.885381398127863,\n \"acc_norm_stderr\": 0.003179100565887989\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6518518518518519,\n \"acc_stderr\": 0.041153246103369526,\n \"acc_norm\": 0.6518518518518519,\n \"acc_norm_stderr\": 0.041153246103369526\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7018867924528301,\n \"acc_stderr\": 0.02815283794249387,\n \"acc_norm\": 0.7018867924528301,\n \"acc_norm_stderr\": 0.02815283794249387\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n 
\"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6647398843930635,\n \"acc_stderr\": 0.03599586301247077,\n \"acc_norm\": 0.6647398843930635,\n \"acc_norm_stderr\": 0.03599586301247077\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5617021276595745,\n \"acc_stderr\": 0.03243618636108101,\n \"acc_norm\": 0.5617021276595745,\n \"acc_norm_stderr\": 0.03243618636108101\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.5087719298245614,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370333,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370333\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4470899470899471,\n \"acc_stderr\": 0.025606723995777025,\n \"acc_norm\": 0.4470899470899471,\n \"acc_norm_stderr\": 0.025606723995777025\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.47619047619047616,\n \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.47619047619047616,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7870967741935484,\n \"acc_stderr\": 0.02328766512726854,\n \"acc_norm\": 0.7870967741935484,\n \"acc_norm_stderr\": 0.02328766512726854\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.035176035403610105,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.035176035403610105\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8181818181818182,\n \"acc_stderr\": 0.027479603010538797,\n \"acc_norm\": 0.8181818181818182,\n \"acc_norm_stderr\": 0.027479603010538797\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033456,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.021500249576033456\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6615384615384615,\n \"acc_stderr\": 
0.023991500500313036,\n \"acc_norm\": 0.6615384615384615,\n \"acc_norm_stderr\": 0.023991500500313036\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35185185185185186,\n \"acc_stderr\": 0.029116617606083008,\n \"acc_norm\": 0.35185185185185186,\n \"acc_norm_stderr\": 0.029116617606083008\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6890756302521008,\n \"acc_stderr\": 0.03006676158297794,\n \"acc_norm\": 0.6890756302521008,\n \"acc_norm_stderr\": 0.03006676158297794\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.36423841059602646,\n \"acc_stderr\": 0.03929111781242742,\n \"acc_norm\": 0.36423841059602646,\n \"acc_norm_stderr\": 0.03929111781242742\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8422018348623853,\n \"acc_stderr\": 0.015630022970092448,\n \"acc_norm\": 0.8422018348623853,\n \"acc_norm_stderr\": 0.015630022970092448\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.034099716973523674,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.034099716973523674\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.026156867523931045,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.026156867523931045\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8016877637130801,\n \"acc_stderr\": 0.02595502084162113,\n \"acc_norm\": 0.8016877637130801,\n \"acc_norm_stderr\": 0.02595502084162113\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7938931297709924,\n \"acc_stderr\": 0.03547771004159463,\n \"acc_norm\": 0.7938931297709924,\n \"acc_norm_stderr\": 0.03547771004159463\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990946,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990946\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.04133119440243839,\n \"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.04133119440243839\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.033519538795212696,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.033519538795212696\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4017857142857143,\n \"acc_stderr\": 0.04653333146973646,\n \"acc_norm\": 0.4017857142857143,\n \"acc_norm_stderr\": 0.04653333146973646\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.020588491316092368,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.020588491316092368\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8339719029374202,\n \"acc_stderr\": 0.0133064782430663,\n \"acc_norm\": 0.8339719029374202,\n \"acc_norm_stderr\": 0.0133064782430663\n 
},\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7341040462427746,\n \"acc_stderr\": 0.02378620325550829,\n \"acc_norm\": 0.7341040462427746,\n \"acc_norm_stderr\": 0.02378620325550829\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.42793296089385474,\n \"acc_stderr\": 0.01654788799741611,\n \"acc_norm\": 0.42793296089385474,\n \"acc_norm_stderr\": 0.01654788799741611\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7189542483660131,\n \"acc_stderr\": 0.025738854797818733,\n \"acc_norm\": 0.7189542483660131,\n \"acc_norm_stderr\": 0.025738854797818733\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7170418006430869,\n \"acc_stderr\": 0.02558306248998481,\n \"acc_norm\": 0.7170418006430869,\n \"acc_norm_stderr\": 0.02558306248998481\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7530864197530864,\n \"acc_stderr\": 0.023993501709042103,\n \"acc_norm\": 0.7530864197530864,\n \"acc_norm_stderr\": 0.023993501709042103\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.02982074719142248,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.02982074719142248\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46740547588005216,\n \"acc_stderr\": 0.01274307294265335,\n \"acc_norm\": 0.46740547588005216,\n \"acc_norm_stderr\": 0.01274307294265335\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6838235294117647,\n \"acc_stderr\": 0.028245687391462923,\n \"acc_norm\": 0.6838235294117647,\n \"acc_norm_stderr\": 0.028245687391462923\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6633986928104575,\n \"acc_stderr\": 0.019117213911495148,\n \"acc_norm\": 0.6633986928104575,\n \"acc_norm_stderr\": 0.019117213911495148\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.726530612244898,\n \"acc_stderr\": 0.028535560337128448,\n \"acc_norm\": 0.726530612244898,\n \"acc_norm_stderr\": 0.028535560337128448\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454115,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454115\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.554467564259486,\n \"mc1_stderr\": 0.01739933528014034,\n \"mc2\": 0.6947306262348207,\n \"mc2_stderr\": 0.015031157853542046\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8650355169692187,\n \"acc_stderr\": 0.009603064913219049\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.686125852918878,\n \"acc_stderr\": 0.012782681251053198\n }\n}\n```", "repo_url": "https://huggingface.co/Kquant03/Buttercup-V2-bf16", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|arc:challenge|25_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|gsm8k|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hellaswag|10_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T23-01-38.445097.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T23-01-38.445097.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-15T23-01-38.445097.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-15T23-01-38.445097.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T23-01-38.445097.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T23-01-38.445097.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["**/details_harness|winogrande|5_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-15T23-01-38.445097.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_15T23_01_38.445097", "path": ["results_2024-02-15T23-01-38.445097.parquet"]}, {"split": "latest", "path": 
["results_2024-02-15T23-01-38.445097.parquet"]}]}]}
2024-02-15T23:04:17+00:00
4be847cc89900bf9db1eba729a1ef42a1d9cca54
PAD6/drwithsquadformat
[ "region:us" ]
2024-02-15T23:12:14+00:00
{}
2024-02-15T23:15:49+00:00
967dd83abf6b67b4c5066e94dddbd7fe5ab0bfb6
Based on AEZAKMI V3. I removed some of the general Airoboros content that made the model predictable and boring, and changed up the system prompts for the wsb_001 prompts a bit.
adamo1139/AEZAKMI_v3-1
[ "license:other", "region:us" ]
2024-02-15T23:21:26+00:00
{"license": "other", "license_name": "other", "license_link": "LICENSE"}
2024-02-15T23:22:52+00:00
e683653ca7e2f3fa34251f89bec89df1ec1224c0
Mayareis/vozz
[ "license:openrail", "region:us" ]
2024-02-15T23:26:03+00:00
{"license": "openrail"}
2024-02-15T23:28:55+00:00
1e4dbe6985990654c62d8a29f959b7089989662c
chavinlo/hdvila100test
[ "region:us" ]
2024-02-15T23:58:40+00:00
{"dataset_info": {"features": [{"name": "clip_id", "dtype": "string"}, {"name": "video_id", "dtype": "string"}, {"name": "url", "dtype": "string"}, {"name": "span_start", "dtype": "string"}, {"name": "span_end", "dtype": "string"}, {"name": "caption", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 16430, "num_examples": 100}], "download_size": 8118, "dataset_size": 16430}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-15T23:58:59+00:00
03a170667a7ec80246da008798e2203ab07a0867
chavinlo/hdvila5ktest
[ "region:us" ]
2024-02-16T00:00:26+00:00
{"dataset_info": {"features": [{"name": "clip_id", "dtype": "string"}, {"name": "video_id", "dtype": "string"}, {"name": "url", "dtype": "string"}, {"name": "span_start", "dtype": "string"}, {"name": "span_end", "dtype": "string"}, {"name": "caption", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 823279, "num_examples": 5000}], "download_size": 200764, "dataset_size": 823279}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-16T00:00:36+00:00
7c92851a5d413d723a5611b7c94f6b78ceedd6d3
# MathPile ArXiv (subset)

## Description

This dataset consists of 343,830 TeX files containing mathematics papers sourced from the arXiv. Training and testing sets are already split.

## Source

The data was obtained from the training + validation portion of the arXiv subset of MathPile.

## Format

- LLaMA BOS and EOS tokens (`<s>` and `</s>`) have been added to mark the beginning and end of each sequence.
- The dataset is organized into blocks of 64,000 documents each, stored in JSONL format.

## Usage

- LaTeX source intended for language-model training and related experiments (see the loading sketch below).

## License

The original data is subject to the licensing terms of the arXiv. Users should refer to the arXiv's terms of use for details on permissible usage.
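A minimal loading sketch for one JSONL block. The block filename and the `text` field name below are assumptions for illustration; check the repository's actual shard names and record schema before relying on them.

```python
import json

# Hypothetical shard name -- the actual block filenames in the repo may differ.
BLOCK_PATH = "block_00000.jsonl"

def iter_documents(path):
    """Yield paper texts from one JSONL block, stripping the LLaMA-style
    <s> / </s> markers that wrap each sequence."""
    with open(path, encoding="utf-8") as f:
        for line in f:
            record = json.loads(line)
            text = record.get("text", "")  # assumed field name
            if text.startswith("<s>"):
                text = text[len("<s>"):]
            if text.endswith("</s>"):
                text = text[:-len("</s>")]
            yield text.strip()

# Print the first few documents as a sanity check.
for i, doc in enumerate(iter_documents(BLOCK_PATH)):
    print(doc[:200])
    if i == 2:
        break
```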
aluncstokes/math_arxiv_temp
[ "region:us" ]
2024-02-16T00:10:27+00:00
{}
2024-02-17T02:59:11+00:00
2ae7aaafb4dd8278f21f6481930e66ceecd60197
furry-br/civilliam
[ "license:openrail", "region:us" ]
2024-02-16T00:11:18+00:00
{"license": "openrail"}
2024-02-16T00:11:58+00:00
764f1db623dacf9b4faeadbb043539255c868bb4
cnmoro/Text_Structuring_SOLAR_10.7B_Distilled_Smaller
[ "region:us" ]
2024-02-16T00:40:20+00:00
{}
2024-02-16T00:40:41+00:00
62a13de410999c20f60ec2f3be93457dcaac0d62
KomeijiForce/PropL
[ "region:us" ]
2024-02-16T00:41:51+00:00
{}
2024-02-16T00:47:05+00:00
07c0f3cc63403b9bf768d236c2f995ff2d67e61a
tyranus/trisvoice
[ "license:openrail", "region:us" ]
2024-02-16T00:43:39+00:00
{"license": "openrail"}
2024-02-16T01:00:07+00:00
aa4156d9096f814de79f427873c84750132ffd08
cogent-demon/l0lpusher69
[ "region:us" ]
2024-02-16T00:46:33+00:00
{}
2024-02-16T00:48:18+00:00
e9945407504d876959f0932b3e2387072b6ab099
# Dataset Card for Evaluation run of NovoCode/NeuralPaca-7b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [NovoCode/NeuralPaca-7b](https://huggingface.co/NovoCode/NeuralPaca-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_NovoCode__NeuralPaca-7b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-16T00:47:24.688523](https://huggingface.co/datasets/open-llm-leaderboard/details_NovoCode__NeuralPaca-7b/blob/main/results_2024-02-16T00-47-24.688523.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6323051718212915, "acc_stderr": 0.032436835484599615, "acc_norm": 0.6335791604628077, "acc_norm_stderr": 0.033101979094730234, "mc1": 0.35006119951040393, "mc1_stderr": 0.01669794942015103, "mc2": 0.4831753085957374, "mc2_stderr": 0.015324947436319568 }, "harness|arc:challenge|25": { "acc": 0.5921501706484642, "acc_stderr": 0.014361097288449707, "acc_norm": 0.6279863481228669, "acc_norm_stderr": 0.014124597881844461 }, "harness|hellaswag|10": { "acc": 0.632742481577375, "acc_stderr": 0.004810723108378215, "acc_norm": 0.8301135232025493, "acc_norm_stderr": 0.0037476555337545153 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.04688261722621503, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621503 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6644736842105263, "acc_stderr": 0.038424985593952694, "acc_norm": 0.6644736842105263, "acc_norm_stderr": 0.038424985593952694 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7018867924528301, "acc_stderr": 0.02815283794249387, "acc_norm": 0.7018867924528301, "acc_norm_stderr": 0.02815283794249387 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7291666666666666, "acc_stderr": 0.03716177437566017, "acc_norm": 0.7291666666666666, "acc_norm_stderr": 0.03716177437566017 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, 
"acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6127167630057804, "acc_stderr": 0.03714325906302065, "acc_norm": 0.6127167630057804, "acc_norm_stderr": 0.03714325906302065 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.39215686274509803, "acc_stderr": 0.04858083574266345, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.04858083574266345 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5234042553191489, "acc_stderr": 0.03265019475033582, "acc_norm": 0.5234042553191489, "acc_norm_stderr": 0.03265019475033582 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4298245614035088, "acc_stderr": 0.04657047260594964, "acc_norm": 0.4298245614035088, "acc_norm_stderr": 0.04657047260594964 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5448275862068965, "acc_stderr": 0.04149886942192117, "acc_norm": 0.5448275862068965, "acc_norm_stderr": 0.04149886942192117 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41534391534391535, "acc_stderr": 0.025379524910778405, "acc_norm": 0.41534391534391535, "acc_norm_stderr": 0.025379524910778405 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4365079365079365, "acc_stderr": 0.04435932892851466, "acc_norm": 0.4365079365079365, "acc_norm_stderr": 0.04435932892851466 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7612903225806451, "acc_stderr": 0.024251071262208837, "acc_norm": 0.7612903225806451, "acc_norm_stderr": 0.024251071262208837 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.49261083743842365, "acc_stderr": 0.035176035403610084, "acc_norm": 0.49261083743842365, "acc_norm_stderr": 0.035176035403610084 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.68, "acc_stderr": 0.04688261722621504, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7929292929292929, "acc_stderr": 0.028869778460267045, "acc_norm": 0.7929292929292929, "acc_norm_stderr": 0.028869778460267045 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8911917098445595, "acc_stderr": 0.022473253332768787, "acc_norm": 0.8911917098445595, "acc_norm_stderr": 0.022473253332768787 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6435897435897436, "acc_stderr": 0.02428314052946731, "acc_norm": 0.6435897435897436, "acc_norm_stderr": 0.02428314052946731 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35555555555555557, "acc_stderr": 0.02918571494985741, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.02918571494985741 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6764705882352942, "acc_stderr": 0.03038835355188679, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.03038835355188679 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3708609271523179, "acc_stderr": 0.03943966699183629, "acc_norm": 0.3708609271523179, "acc_norm_stderr": 
0.03943966699183629 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8385321100917431, "acc_stderr": 0.01577623925616323, "acc_norm": 0.8385321100917431, "acc_norm_stderr": 0.01577623925616323 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5046296296296297, "acc_stderr": 0.03409825519163572, "acc_norm": 0.5046296296296297, "acc_norm_stderr": 0.03409825519163572 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8088235294117647, "acc_stderr": 0.027599174300640766, "acc_norm": 0.8088235294117647, "acc_norm_stderr": 0.027599174300640766 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7763713080168776, "acc_stderr": 0.027123298205229966, "acc_norm": 0.7763713080168776, "acc_norm_stderr": 0.027123298205229966 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6995515695067265, "acc_stderr": 0.030769352008229146, "acc_norm": 0.6995515695067265, "acc_norm_stderr": 0.030769352008229146 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7709923664122137, "acc_stderr": 0.036853466317118506, "acc_norm": 0.7709923664122137, "acc_norm_stderr": 0.036853466317118506 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7314814814814815, "acc_stderr": 0.042844679680521934, "acc_norm": 0.7314814814814815, "acc_norm_stderr": 0.042844679680521934 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7484662576687117, "acc_stderr": 0.03408997886857529, "acc_norm": 0.7484662576687117, "acc_norm_stderr": 0.03408997886857529 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.44642857142857145, "acc_stderr": 0.04718471485219588, "acc_norm": 0.44642857142857145, "acc_norm_stderr": 0.04718471485219588 }, "harness|hendrycksTest-management|5": { "acc": 0.8252427184466019, "acc_stderr": 0.03760178006026621, "acc_norm": 0.8252427184466019, "acc_norm_stderr": 0.03760178006026621 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8846153846153846, "acc_stderr": 0.020930193185179326, "acc_norm": 0.8846153846153846, "acc_norm_stderr": 0.020930193185179326 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.67, "acc_stderr": 0.04725815626252609, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252609 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8173690932311622, "acc_stderr": 0.013816335389973133, "acc_norm": 0.8173690932311622, "acc_norm_stderr": 0.013816335389973133 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7196531791907514, "acc_stderr": 0.02418242749657761, "acc_norm": 0.7196531791907514, "acc_norm_stderr": 0.02418242749657761 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.32625698324022345, "acc_stderr": 0.01568044151888918, "acc_norm": 0.32625698324022345, "acc_norm_stderr": 0.01568044151888918 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6862745098039216, "acc_stderr": 0.026568921015457162, "acc_norm": 0.6862745098039216, "acc_norm_stderr": 0.026568921015457162 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7106109324758842, "acc_stderr": 0.025755865922632945, "acc_norm": 0.7106109324758842, "acc_norm_stderr": 0.025755865922632945 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7283950617283951, "acc_stderr": 0.02474862449053737, "acc_norm": 0.7283950617283951, "acc_norm_stderr": 0.02474862449053737 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4574468085106383, "acc_stderr": 
0.02971928127223685, "acc_norm": 0.4574468085106383, "acc_norm_stderr": 0.02971928127223685 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.439374185136897, "acc_stderr": 0.012676014778580214, "acc_norm": 0.439374185136897, "acc_norm_stderr": 0.012676014778580214 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6397058823529411, "acc_stderr": 0.029163128570670733, "acc_norm": 0.6397058823529411, "acc_norm_stderr": 0.029163128570670733 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6388888888888888, "acc_stderr": 0.01943177567703731, "acc_norm": 0.6388888888888888, "acc_norm_stderr": 0.01943177567703731 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.044612721759105085, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.044612721759105085 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6816326530612244, "acc_stderr": 0.029822533793982066, "acc_norm": 0.6816326530612244, "acc_norm_stderr": 0.029822533793982066 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8258706467661692, "acc_stderr": 0.026814951200421603, "acc_norm": 0.8258706467661692, "acc_norm_stderr": 0.026814951200421603 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.83, "acc_stderr": 0.03775251680686371, "acc_norm": 0.83, "acc_norm_stderr": 0.03775251680686371 }, "harness|hendrycksTest-virology|5": { "acc": 0.5301204819277109, "acc_stderr": 0.03885425420866767, "acc_norm": 0.5301204819277109, "acc_norm_stderr": 0.03885425420866767 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.847953216374269, "acc_stderr": 0.02753912288906145, "acc_norm": 0.847953216374269, "acc_norm_stderr": 0.02753912288906145 }, "harness|truthfulqa:mc|0": { "mc1": 0.35006119951040393, "mc1_stderr": 0.01669794942015103, "mc2": 0.4831753085957374, "mc2_stderr": 0.015324947436319568 }, "harness|winogrande|5": { "acc": 0.8168902920284136, "acc_stderr": 0.01086977863316836 }, "harness|gsm8k|5": { "acc": 0.6057619408642911, "acc_stderr": 0.013460852357095666 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
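Building on the loading example above, here is a hedged sketch (assuming the `datasets` library and access to the Hugging Face Hub) for enumerating the per-task configurations and pulling the aggregated results split that this card's metadata defines:

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_NovoCode__NeuralPaca-7b"

# List the per-task configurations plus the aggregated "results" config.
configs = get_dataset_config_names(repo)
print(len(configs), "configs, e.g.:", configs[:3])

# The "latest" split of the "results" config holds the most recent aggregated run.
results = load_dataset(repo, "results", split="latest")
print(results[0])
```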
open-llm-leaderboard/details_NovoCode__NeuralPaca-7b
[ "region:us" ]
2024-02-16T00:49:45+00:00
{"pretty_name": "Evaluation run of NovoCode/NeuralPaca-7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [NovoCode/NeuralPaca-7b](https://huggingface.co/NovoCode/NeuralPaca-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_NovoCode__NeuralPaca-7b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-16T00:47:24.688523](https://huggingface.co/datasets/open-llm-leaderboard/details_NovoCode__NeuralPaca-7b/blob/main/results_2024-02-16T00-47-24.688523.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6323051718212915,\n \"acc_stderr\": 0.032436835484599615,\n \"acc_norm\": 0.6335791604628077,\n \"acc_norm_stderr\": 0.033101979094730234,\n \"mc1\": 0.35006119951040393,\n \"mc1_stderr\": 0.01669794942015103,\n \"mc2\": 0.4831753085957374,\n \"mc2_stderr\": 0.015324947436319568\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5921501706484642,\n \"acc_stderr\": 0.014361097288449707,\n \"acc_norm\": 0.6279863481228669,\n \"acc_norm_stderr\": 0.014124597881844461\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.632742481577375,\n \"acc_stderr\": 0.004810723108378215,\n \"acc_norm\": 0.8301135232025493,\n \"acc_norm_stderr\": 0.0037476555337545153\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621503,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621503\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6644736842105263,\n \"acc_stderr\": 0.038424985593952694,\n \"acc_norm\": 0.6644736842105263,\n \"acc_norm_stderr\": 0.038424985593952694\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7018867924528301,\n \"acc_stderr\": 0.02815283794249387,\n \"acc_norm\": 0.7018867924528301,\n \"acc_norm_stderr\": 0.02815283794249387\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7291666666666666,\n \"acc_stderr\": 0.03716177437566017,\n \"acc_norm\": 0.7291666666666666,\n \"acc_norm_stderr\": 0.03716177437566017\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n 
\"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6127167630057804,\n \"acc_stderr\": 0.03714325906302065,\n \"acc_norm\": 0.6127167630057804,\n \"acc_norm_stderr\": 0.03714325906302065\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.39215686274509803,\n \"acc_stderr\": 0.04858083574266345,\n \"acc_norm\": 0.39215686274509803,\n \"acc_norm_stderr\": 0.04858083574266345\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5234042553191489,\n \"acc_stderr\": 0.03265019475033582,\n \"acc_norm\": 0.5234042553191489,\n \"acc_norm_stderr\": 0.03265019475033582\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4298245614035088,\n \"acc_stderr\": 0.04657047260594964,\n \"acc_norm\": 0.4298245614035088,\n \"acc_norm_stderr\": 0.04657047260594964\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41534391534391535,\n \"acc_stderr\": 0.025379524910778405,\n \"acc_norm\": 0.41534391534391535,\n \"acc_norm_stderr\": 0.025379524910778405\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4365079365079365,\n \"acc_stderr\": 0.04435932892851466,\n \"acc_norm\": 0.4365079365079365,\n \"acc_norm_stderr\": 0.04435932892851466\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7612903225806451,\n \"acc_stderr\": 0.024251071262208837,\n \"acc_norm\": 0.7612903225806451,\n \"acc_norm_stderr\": 0.024251071262208837\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.49261083743842365,\n \"acc_stderr\": 0.035176035403610084,\n \"acc_norm\": 0.49261083743842365,\n \"acc_norm_stderr\": 0.035176035403610084\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7929292929292929,\n \"acc_stderr\": 0.028869778460267045,\n \"acc_norm\": 0.7929292929292929,\n \"acc_norm_stderr\": 0.028869778460267045\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8911917098445595,\n \"acc_stderr\": 0.022473253332768787,\n \"acc_norm\": 0.8911917098445595,\n \"acc_norm_stderr\": 0.022473253332768787\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6435897435897436,\n 
\"acc_stderr\": 0.02428314052946731,\n \"acc_norm\": 0.6435897435897436,\n \"acc_norm_stderr\": 0.02428314052946731\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35555555555555557,\n \"acc_stderr\": 0.02918571494985741,\n \"acc_norm\": 0.35555555555555557,\n \"acc_norm_stderr\": 0.02918571494985741\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.03038835355188679,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.03038835355188679\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8385321100917431,\n \"acc_stderr\": 0.01577623925616323,\n \"acc_norm\": 0.8385321100917431,\n \"acc_norm_stderr\": 0.01577623925616323\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5046296296296297,\n \"acc_stderr\": 0.03409825519163572,\n \"acc_norm\": 0.5046296296296297,\n \"acc_norm_stderr\": 0.03409825519163572\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8088235294117647,\n \"acc_stderr\": 0.027599174300640766,\n \"acc_norm\": 0.8088235294117647,\n \"acc_norm_stderr\": 0.027599174300640766\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7763713080168776,\n \"acc_stderr\": 0.027123298205229966,\n \"acc_norm\": 0.7763713080168776,\n \"acc_norm_stderr\": 0.027123298205229966\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6995515695067265,\n \"acc_stderr\": 0.030769352008229146,\n \"acc_norm\": 0.6995515695067265,\n \"acc_norm_stderr\": 0.030769352008229146\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7709923664122137,\n \"acc_stderr\": 0.036853466317118506,\n \"acc_norm\": 0.7709923664122137,\n \"acc_norm_stderr\": 0.036853466317118506\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7314814814814815,\n \"acc_stderr\": 0.042844679680521934,\n \"acc_norm\": 0.7314814814814815,\n \"acc_norm_stderr\": 0.042844679680521934\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7484662576687117,\n \"acc_stderr\": 0.03408997886857529,\n \"acc_norm\": 0.7484662576687117,\n \"acc_norm_stderr\": 0.03408997886857529\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n \"acc_stderr\": 0.04718471485219588,\n \"acc_norm\": 0.44642857142857145,\n \"acc_norm_stderr\": 0.04718471485219588\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8252427184466019,\n \"acc_stderr\": 0.03760178006026621,\n \"acc_norm\": 0.8252427184466019,\n \"acc_norm_stderr\": 0.03760178006026621\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8846153846153846,\n \"acc_stderr\": 0.020930193185179326,\n \"acc_norm\": 0.8846153846153846,\n \"acc_norm_stderr\": 0.020930193185179326\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252609,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252609\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8173690932311622,\n \"acc_stderr\": 0.013816335389973133,\n \"acc_norm\": 
0.8173690932311622,\n \"acc_norm_stderr\": 0.013816335389973133\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7196531791907514,\n \"acc_stderr\": 0.02418242749657761,\n \"acc_norm\": 0.7196531791907514,\n \"acc_norm_stderr\": 0.02418242749657761\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.32625698324022345,\n \"acc_stderr\": 0.01568044151888918,\n \"acc_norm\": 0.32625698324022345,\n \"acc_norm_stderr\": 0.01568044151888918\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6862745098039216,\n \"acc_stderr\": 0.026568921015457162,\n \"acc_norm\": 0.6862745098039216,\n \"acc_norm_stderr\": 0.026568921015457162\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n \"acc_stderr\": 0.025755865922632945,\n \"acc_norm\": 0.7106109324758842,\n \"acc_norm_stderr\": 0.025755865922632945\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7283950617283951,\n \"acc_stderr\": 0.02474862449053737,\n \"acc_norm\": 0.7283950617283951,\n \"acc_norm_stderr\": 0.02474862449053737\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4574468085106383,\n \"acc_stderr\": 0.02971928127223685,\n \"acc_norm\": 0.4574468085106383,\n \"acc_norm_stderr\": 0.02971928127223685\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.439374185136897,\n \"acc_stderr\": 0.012676014778580214,\n \"acc_norm\": 0.439374185136897,\n \"acc_norm_stderr\": 0.012676014778580214\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6397058823529411,\n \"acc_stderr\": 0.029163128570670733,\n \"acc_norm\": 0.6397058823529411,\n \"acc_norm_stderr\": 0.029163128570670733\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6388888888888888,\n \"acc_stderr\": 0.01943177567703731,\n \"acc_norm\": 0.6388888888888888,\n \"acc_norm_stderr\": 0.01943177567703731\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.044612721759105085,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.044612721759105085\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6816326530612244,\n \"acc_stderr\": 0.029822533793982066,\n \"acc_norm\": 0.6816326530612244,\n \"acc_norm_stderr\": 0.029822533793982066\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8258706467661692,\n \"acc_stderr\": 0.026814951200421603,\n \"acc_norm\": 0.8258706467661692,\n \"acc_norm_stderr\": 0.026814951200421603\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.03775251680686371,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.03775251680686371\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5301204819277109,\n \"acc_stderr\": 0.03885425420866767,\n \"acc_norm\": 0.5301204819277109,\n \"acc_norm_stderr\": 0.03885425420866767\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.847953216374269,\n \"acc_stderr\": 0.02753912288906145,\n \"acc_norm\": 0.847953216374269,\n \"acc_norm_stderr\": 0.02753912288906145\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.35006119951040393,\n \"mc1_stderr\": 0.01669794942015103,\n \"mc2\": 0.4831753085957374,\n \"mc2_stderr\": 0.015324947436319568\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8168902920284136,\n \"acc_stderr\": 0.01086977863316836\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6057619408642911,\n \"acc_stderr\": 0.013460852357095666\n }\n}\n```", "repo_url": "https://huggingface.co/NovoCode/NeuralPaca-7b", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|arc:challenge|25_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|gsm8k|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hellaswag|10_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T00-47-24.688523.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T00-47-24.688523.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T00-47-24.688523.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T00-47-24.688523.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T00-47-24.688523.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T00-47-24.688523.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["**/details_harness|winogrande|5_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-16T00-47-24.688523.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_16T00_47_24.688523", "path": ["results_2024-02-16T00-47-24.688523.parquet"]}, {"split": "latest", "path": 
["results_2024-02-16T00-47-24.688523.parquet"]}]}]}
2024-02-16T00:50:09+00:00
693d353fc9bd9e57ca97a05a8175a7abfdc752d0
# Dataset Card for Evaluation run of jeiku/NarrativeNexus_7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [jeiku/NarrativeNexus_7B](https://huggingface.co/jeiku/NarrativeNexus_7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jeiku__NarrativeNexus_7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-16T01:30:29.349287](https://huggingface.co/datasets/open-llm-leaderboard/details_jeiku__NarrativeNexus_7B/blob/main/results_2024-02-16T01-30-29.349287.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6331502373053775, "acc_stderr": 0.032649477056743835, "acc_norm": 0.6360612367088411, "acc_norm_stderr": 0.03330403787596569, "mc1": 0.46878824969400246, "mc1_stderr": 0.017469364874577537, "mc2": 0.6394506791157332, "mc2_stderr": 0.015272071804569947 }, "harness|arc:challenge|25": { "acc": 0.6279863481228669, "acc_stderr": 0.01412459788184446, "acc_norm": 0.6612627986348123, "acc_norm_stderr": 0.01383056892797433 }, "harness|hellaswag|10": { "acc": 0.6773551085441147, "acc_stderr": 0.004665327309399188, "acc_norm": 0.8573989245170285, "acc_norm_stderr": 0.003489509493001621 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6296296296296297, "acc_stderr": 0.041716541613545426, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.041716541613545426 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6776315789473685, "acc_stderr": 0.03803510248351585, "acc_norm": 0.6776315789473685, "acc_norm_stderr": 0.03803510248351585 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6943396226415094, "acc_stderr": 0.028353298073322663, "acc_norm": 0.6943396226415094, "acc_norm_stderr": 0.028353298073322663 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr":
0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6358381502890174, "acc_stderr": 0.03669072477416907, "acc_norm": 0.6358381502890174, "acc_norm_stderr": 0.03669072477416907 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3431372549019608, "acc_stderr": 0.04724007352383888, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.04724007352383888 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5787234042553191, "acc_stderr": 0.03227834510146268, "acc_norm": 0.5787234042553191, "acc_norm_stderr": 0.03227834510146268 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.47368421052631576, "acc_stderr": 0.046970851366478626, "acc_norm": 0.47368421052631576, "acc_norm_stderr": 0.046970851366478626 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5793103448275863, "acc_stderr": 0.04113914981189261, "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.04113914981189261 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3888888888888889, "acc_stderr": 0.025107425481137285, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.025107425481137285 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677171, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677171 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7645161290322581, "acc_stderr": 0.02413763242933771, "acc_norm": 0.7645161290322581, "acc_norm_stderr": 0.02413763242933771 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5123152709359606, "acc_stderr": 0.035169204442208966, "acc_norm": 0.5123152709359606, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.68, "acc_stderr": 0.046882617226215034, "acc_norm": 0.68, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7636363636363637, "acc_stderr": 0.033175059300091805, "acc_norm": 0.7636363636363637, "acc_norm_stderr": 0.033175059300091805 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7929292929292929, "acc_stderr": 0.028869778460267042, "acc_norm": 0.7929292929292929, "acc_norm_stderr": 0.028869778460267042 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8601036269430051, "acc_stderr": 0.025033870583015184, "acc_norm": 0.8601036269430051, "acc_norm_stderr": 0.025033870583015184 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6666666666666666, "acc_stderr": 0.023901157979402534, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.023901157979402534 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3888888888888889, "acc_stderr": 0.029723278961476664, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.029723278961476664 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6848739495798319, "acc_stderr": 0.030176808288974337, "acc_norm": 0.6848739495798319, "acc_norm_stderr": 0.030176808288974337 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 
0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8036697247706422, "acc_stderr": 0.01703071933915434, "acc_norm": 0.8036697247706422, "acc_norm_stderr": 0.01703071933915434 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.49074074074074076, "acc_stderr": 0.03409386946992699, "acc_norm": 0.49074074074074076, "acc_norm_stderr": 0.03409386946992699 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7892156862745098, "acc_stderr": 0.028626547912437406, "acc_norm": 0.7892156862745098, "acc_norm_stderr": 0.028626547912437406 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7552742616033755, "acc_stderr": 0.027985699387036423, "acc_norm": 0.7552742616033755, "acc_norm_stderr": 0.027985699387036423 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7099236641221374, "acc_stderr": 0.03980066246467765, "acc_norm": 0.7099236641221374, "acc_norm_stderr": 0.03980066246467765 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.03749492448709695, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.03749492448709695 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7407407407407407, "acc_stderr": 0.04236511258094633, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.04236511258094633 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7484662576687117, "acc_stderr": 0.03408997886857529, "acc_norm": 0.7484662576687117, "acc_norm_stderr": 0.03408997886857529 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.7378640776699029, "acc_stderr": 0.04354631077260595, "acc_norm": 0.7378640776699029, "acc_norm_stderr": 0.04354631077260595 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406964, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406964 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8084291187739464, "acc_stderr": 0.014072859310451949, "acc_norm": 0.8084291187739464, "acc_norm_stderr": 0.014072859310451949 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6878612716763006, "acc_stderr": 0.024946792225272314, "acc_norm": 0.6878612716763006, "acc_norm_stderr": 0.024946792225272314 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4312849162011173, "acc_stderr": 0.016563829399047707, "acc_norm": 0.4312849162011173, "acc_norm_stderr": 0.016563829399047707 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7026143790849673, "acc_stderr": 0.02617390850671858, "acc_norm": 0.7026143790849673, "acc_norm_stderr": 0.02617390850671858 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6945337620578779, "acc_stderr": 0.026160584450140453, "acc_norm": 0.6945337620578779, "acc_norm_stderr": 0.026160584450140453 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6882716049382716, "acc_stderr": 0.025773111169630457, "acc_norm": 0.6882716049382716, "acc_norm_stderr": 0.025773111169630457 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4397163120567376, "acc_stderr": 
0.02960991207559411, "acc_norm": 0.4397163120567376, "acc_norm_stderr": 0.02960991207559411 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4576271186440678, "acc_stderr": 0.012724296550980188, "acc_norm": 0.4576271186440678, "acc_norm_stderr": 0.012724296550980188 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6691176470588235, "acc_stderr": 0.028582709753898445, "acc_norm": 0.6691176470588235, "acc_norm_stderr": 0.028582709753898445 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6160130718954249, "acc_stderr": 0.019675808135281508, "acc_norm": 0.6160130718954249, "acc_norm_stderr": 0.019675808135281508 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6938775510204082, "acc_stderr": 0.029504896454595957, "acc_norm": 0.6938775510204082, "acc_norm_stderr": 0.029504896454595957 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8507462686567164, "acc_stderr": 0.02519692987482707, "acc_norm": 0.8507462686567164, "acc_norm_stderr": 0.02519692987482707 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.03379976689896309, "acc_norm": 0.87, "acc_norm_stderr": 0.03379976689896309 }, "harness|hendrycksTest-virology|5": { "acc": 0.4879518072289157, "acc_stderr": 0.0389136449583582, "acc_norm": 0.4879518072289157, "acc_norm_stderr": 0.0389136449583582 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8421052631578947, "acc_stderr": 0.027966785859160882, "acc_norm": 0.8421052631578947, "acc_norm_stderr": 0.027966785859160882 }, "harness|truthfulqa:mc|0": { "mc1": 0.46878824969400246, "mc1_stderr": 0.017469364874577537, "mc2": 0.6394506791157332, "mc2_stderr": 0.015272071804569947 }, "harness|winogrande|5": { "acc": 0.7900552486187845, "acc_stderr": 0.01144628062926263 }, "harness|gsm8k|5": { "acc": 0.5178165276724791, "acc_stderr": 0.013763738379867933 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
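In addition to the per-task configurations, the aggregated metrics shown above can be pulled directly from the "results" configuration. A minimal sketch, assuming only the config and split names described in this card (everything else here is illustrative):

```python
from datasets import load_dataset

# Aggregated metrics of the most recent run: the "results" configuration,
# with the "latest" split pointing at the newest results file.
results = load_dataset(
    "open-llm-leaderboard/details_jeiku__NarrativeNexus_7B",
    "results",
    split="latest",
)

print(results[0])  # one row of aggregated metrics for the latest timestamp
```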
open-llm-leaderboard/details_jeiku__NarrativeNexus_7B
[ "region:us" ]
2024-02-16T01:32:47+00:00
{"pretty_name": "Evaluation run of jeiku/NarrativeNexus_7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [jeiku/NarrativeNexus_7B](https://huggingface.co/jeiku/NarrativeNexus_7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jeiku__NarrativeNexus_7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-16T01:30:29.349287](https://huggingface.co/datasets/open-llm-leaderboard/details_jeiku__NarrativeNexus_7B/blob/main/results_2024-02-16T01-30-29.349287.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6331502373053775,\n \"acc_stderr\": 0.032649477056743835,\n \"acc_norm\": 0.6360612367088411,\n \"acc_norm_stderr\": 0.03330403787596569,\n \"mc1\": 0.46878824969400246,\n \"mc1_stderr\": 0.017469364874577537,\n \"mc2\": 0.6394506791157332,\n \"mc2_stderr\": 0.015272071804569947\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6279863481228669,\n \"acc_stderr\": 0.01412459788184446,\n \"acc_norm\": 0.6612627986348123,\n \"acc_norm_stderr\": 0.01383056892797433\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6773551085441147,\n \"acc_stderr\": 0.004665327309399188,\n \"acc_norm\": 0.8573989245170285,\n \"acc_norm_stderr\": 0.003489509493001621\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6296296296296297,\n \"acc_stderr\": 0.041716541613545426,\n \"acc_norm\": 0.6296296296296297,\n \"acc_norm_stderr\": 0.041716541613545426\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6776315789473685,\n \"acc_stderr\": 0.03803510248351585,\n \"acc_norm\": 0.6776315789473685,\n \"acc_norm_stderr\": 0.03803510248351585\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6943396226415094,\n \"acc_stderr\": 0.028353298073322663,\n \"acc_norm\": 0.6943396226415094,\n \"acc_norm_stderr\": 0.028353298073322663\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n 
\"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6358381502890174,\n \"acc_stderr\": 0.03669072477416907,\n \"acc_norm\": 0.6358381502890174,\n \"acc_norm_stderr\": 0.03669072477416907\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3431372549019608,\n \"acc_stderr\": 0.04724007352383888,\n \"acc_norm\": 0.3431372549019608,\n \"acc_norm_stderr\": 0.04724007352383888\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5787234042553191,\n \"acc_stderr\": 0.03227834510146268,\n \"acc_norm\": 0.5787234042553191,\n \"acc_norm_stderr\": 0.03227834510146268\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.47368421052631576,\n \"acc_stderr\": 0.046970851366478626,\n \"acc_norm\": 0.47368421052631576,\n \"acc_norm_stderr\": 0.046970851366478626\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5793103448275863,\n \"acc_stderr\": 0.04113914981189261,\n \"acc_norm\": 0.5793103448275863,\n \"acc_norm_stderr\": 0.04113914981189261\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.025107425481137285,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.025107425481137285\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.04463112720677171,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677171\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7645161290322581,\n \"acc_stderr\": 0.02413763242933771,\n \"acc_norm\": 0.7645161290322581,\n \"acc_norm_stderr\": 0.02413763242933771\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7636363636363637,\n \"acc_stderr\": 0.033175059300091805,\n \"acc_norm\": 0.7636363636363637,\n \"acc_norm_stderr\": 0.033175059300091805\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7929292929292929,\n \"acc_stderr\": 0.028869778460267042,\n \"acc_norm\": 0.7929292929292929,\n \"acc_norm_stderr\": 0.028869778460267042\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8601036269430051,\n \"acc_stderr\": 0.025033870583015184,\n \"acc_norm\": 0.8601036269430051,\n \"acc_norm_stderr\": 0.025033870583015184\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 
0.023901157979402534,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.023901157979402534\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.029723278961476664,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.029723278961476664\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6848739495798319,\n \"acc_stderr\": 0.030176808288974337,\n \"acc_norm\": 0.6848739495798319,\n \"acc_norm_stderr\": 0.030176808288974337\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8036697247706422,\n \"acc_stderr\": 0.01703071933915434,\n \"acc_norm\": 0.8036697247706422,\n \"acc_norm_stderr\": 0.01703071933915434\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.49074074074074076,\n \"acc_stderr\": 0.03409386946992699,\n \"acc_norm\": 0.49074074074074076,\n \"acc_norm_stderr\": 0.03409386946992699\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7892156862745098,\n \"acc_stderr\": 0.028626547912437406,\n \"acc_norm\": 0.7892156862745098,\n \"acc_norm_stderr\": 0.028626547912437406\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7552742616033755,\n \"acc_stderr\": 0.027985699387036423,\n \"acc_norm\": 0.7552742616033755,\n \"acc_norm_stderr\": 0.027985699387036423\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7099236641221374,\n \"acc_stderr\": 0.03980066246467765,\n \"acc_norm\": 0.7099236641221374,\n \"acc_norm_stderr\": 0.03980066246467765\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.03749492448709695,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.03749492448709695\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.04236511258094633,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.04236511258094633\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7484662576687117,\n \"acc_stderr\": 0.03408997886857529,\n \"acc_norm\": 0.7484662576687117,\n \"acc_norm_stderr\": 0.03408997886857529\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7378640776699029,\n \"acc_stderr\": 0.04354631077260595,\n \"acc_norm\": 0.7378640776699029,\n \"acc_norm_stderr\": 0.04354631077260595\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406964,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406964\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8084291187739464,\n \"acc_stderr\": 0.014072859310451949,\n \"acc_norm\": 0.8084291187739464,\n 
\"acc_norm_stderr\": 0.014072859310451949\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6878612716763006,\n \"acc_stderr\": 0.024946792225272314,\n \"acc_norm\": 0.6878612716763006,\n \"acc_norm_stderr\": 0.024946792225272314\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4312849162011173,\n \"acc_stderr\": 0.016563829399047707,\n \"acc_norm\": 0.4312849162011173,\n \"acc_norm_stderr\": 0.016563829399047707\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7026143790849673,\n \"acc_stderr\": 0.02617390850671858,\n \"acc_norm\": 0.7026143790849673,\n \"acc_norm_stderr\": 0.02617390850671858\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6945337620578779,\n \"acc_stderr\": 0.026160584450140453,\n \"acc_norm\": 0.6945337620578779,\n \"acc_norm_stderr\": 0.026160584450140453\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6882716049382716,\n \"acc_stderr\": 0.025773111169630457,\n \"acc_norm\": 0.6882716049382716,\n \"acc_norm_stderr\": 0.025773111169630457\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4397163120567376,\n \"acc_stderr\": 0.02960991207559411,\n \"acc_norm\": 0.4397163120567376,\n \"acc_norm_stderr\": 0.02960991207559411\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4576271186440678,\n \"acc_stderr\": 0.012724296550980188,\n \"acc_norm\": 0.4576271186440678,\n \"acc_norm_stderr\": 0.012724296550980188\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6691176470588235,\n \"acc_stderr\": 0.028582709753898445,\n \"acc_norm\": 0.6691176470588235,\n \"acc_norm_stderr\": 0.028582709753898445\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6160130718954249,\n \"acc_stderr\": 0.019675808135281508,\n \"acc_norm\": 0.6160130718954249,\n \"acc_norm_stderr\": 0.019675808135281508\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6938775510204082,\n \"acc_stderr\": 0.029504896454595957,\n \"acc_norm\": 0.6938775510204082,\n \"acc_norm_stderr\": 0.029504896454595957\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8507462686567164,\n \"acc_stderr\": 0.02519692987482707,\n \"acc_norm\": 0.8507462686567164,\n \"acc_norm_stderr\": 0.02519692987482707\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.03379976689896309,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.03379976689896309\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4879518072289157,\n \"acc_stderr\": 0.0389136449583582,\n \"acc_norm\": 0.4879518072289157,\n \"acc_norm_stderr\": 0.0389136449583582\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8421052631578947,\n \"acc_stderr\": 0.027966785859160882,\n \"acc_norm\": 0.8421052631578947,\n \"acc_norm_stderr\": 0.027966785859160882\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.46878824969400246,\n \"mc1_stderr\": 0.017469364874577537,\n \"mc2\": 0.6394506791157332,\n \"mc2_stderr\": 0.015272071804569947\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7900552486187845,\n \"acc_stderr\": 0.01144628062926263\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5178165276724791,\n \"acc_stderr\": 0.013763738379867933\n }\n}\n```", "repo_url": "https://huggingface.co/jeiku/NarrativeNexus_7B", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|arc:challenge|25_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|gsm8k|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hellaswag|10_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T01-30-29.349287.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T01-30-29.349287.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T01-30-29.349287.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T01-30-29.349287.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T01-30-29.349287.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T01-30-29.349287.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["**/details_harness|winogrande|5_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-16T01-30-29.349287.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_16T01_30_29.349287", "path": ["results_2024-02-16T01-30-29.349287.parquet"]}, {"split": "latest", "path": 
["results_2024-02-16T01-30-29.349287.parquet"]}]}]}
2024-02-16T01:33:12+00:00
ecc5ad5859783aac2628568b9a6debbcb43975c6
Raspberry-ai/ref_image_retrieval_v2_isabel_madewell_veronica
[ "region:us" ]
2024-02-16T01:40:56+00:00
{"dataset_info": {"features": [{"name": "caption", "dtype": "string"}, {"name": "source", "dtype": "string"}, {"name": "url", "dtype": "string"}, {"name": "embedding", "sequence": "float32"}, {"name": "image_uri", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 9532899, "num_examples": 3897}], "download_size": 11042284, "dataset_size": 9532899}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-16T01:40:58+00:00
3d10fbafbe7dc4c2ece5ec8b03ab0920704b2dd8
franlucc/starcoderbase-3b-completions_typeinf_analysis
[ "region:us" ]
2024-02-16T01:58:26+00:00
{"dataset_info": {"features": [{"name": "generated", "dtype": "string"}, {"name": "solution", "dtype": "string"}, {"name": "hexsha", "dtype": "string"}, {"name": "prompt", "dtype": "string"}, {"name": "correctness", "dtype": "string"}, {"name": "id", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 27150344, "num_examples": 5313}], "download_size": 5176569, "dataset_size": 27150344}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-16T01:58:27+00:00
ecb19e3f97e5c464bbcf19a78b6af55964afa7b8
mesolitica/noisy-rephrase
[ "region:us" ]
2024-02-16T02:00:45+00:00
{}
2024-02-16T03:19:23+00:00
5ccd2361ba28f3f07c37f184d445bb3c7c0e577c
lab42/grasp-conditioned-hindrance-v3.4
[ "region:us" ]
2024-02-16T02:13:50+00:00
{"dataset_info": {"features": [{"name": "image_0", "dtype": "image"}, {"name": "image_1", "dtype": "image"}, {"name": "image_2", "dtype": "image"}, {"name": "images_rest", "sequence": "image"}, {"name": "mask_0", "dtype": "image"}, {"name": "mask_1", "dtype": "image"}, {"name": "mask_2", "dtype": "image"}, {"name": "masks_rest", "sequence": "image"}, {"name": "conversations", "dtype": "string"}, {"name": "id", "dtype": "string"}, {"name": "dataset", "dtype": "string"}, {"name": "split", "dtype": "string"}, {"name": "n_images", "dtype": "int32"}, {"name": "n_masks", "dtype": "int32"}, {"name": "n_conversations", "dtype": "int32"}], "splits": [{"name": "train", "num_bytes": 37813735.0, "num_examples": 41}, {"name": "validation", "num_bytes": 17486270.0, "num_examples": 19}], "download_size": 55219472, "dataset_size": 55300005.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}]}
2024-02-16T02:14:02+00:00
88326967444e823e142e4e010139c82b1b76cfc2
dhilgaertner/cod-profile-info-dataset
[ "size_categories:n<1K", "language:en", "region:us" ]
2024-02-16T02:18:22+00:00
{"language": ["en"], "size_categories": ["n<1K"]}
2024-02-16T02:25:53+00:00
90451914c3fca8621fb8ba377beb88da3e7b4adc
# Dataset Card for "Test_Dataset_1K" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ouvic215/Test_Dataset_1K
[ "region:us" ]
2024-02-16T02:23:19+00:00
{"dataset_info": {"features": [{"name": "mask_image", "dtype": "image"}, {"name": "text", "dtype": "string"}, {"name": "image", "dtype": "image"}], "splits": [{"name": "train", "num_bytes": 147332332.0, "num_examples": 1588}], "download_size": 146499523, "dataset_size": 147332332.0}}
2024-02-16T02:23:49+00:00
96f536570316ef052012ff5182e10316e3ac591e
yan2069/ProfessorGarlick
[ "license:openrail", "region:us" ]
2024-02-16T02:24:00+00:00
{"license": "openrail"}
2024-02-16T02:30:18+00:00
effb9731cf538da7e6398371760a81d6b7fb5ff8
benayas/banking_augmented_5pct_v0
[ "region:us" ]
2024-02-16T02:30:45+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "category", "dtype": "string"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 1040273, "num_examples": 10003}], "download_size": 407790, "dataset_size": 1040273}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-16T02:30:48+00:00
fef084ca60b690b044c1654b1089080ebdf71967
Jacklab/pixiv_v1
[ "license:gpl-3.0", "region:us" ]
2024-02-16T02:42:20+00:00
{"license": "gpl-3.0"}
2024-02-16T03:29:45+00:00
f9578abd8ffa972d85921b1072148184bd98625f
wangxingjun778/test_dogs_and_cats
[ "license:apache-2.0", "region:us" ]
2024-02-16T02:42:34+00:00
{"license": "apache-2.0"}
2024-02-16T03:17:27+00:00
7d29f635111f692fc91b40b425059c2ba9c9da41
manishiitg/berkeley-nest-Nectar
[ "region:us" ]
2024-02-16T02:44:51+00:00
{"dataset_info": {"features": [{"name": "org_dataset", "dtype": "string"}, {"name": "uniq_id", "dtype": "string"}, {"name": "en_messages", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "hi_messages", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}], "splits": [{"name": "train", "num_bytes": 788010420, "num_examples": 180461}], "download_size": 303565053, "dataset_size": 788010420}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-17T04:30:31+00:00
cc779b07c502b1ab061f5ea2ad562db7234be298
manishiitg/teknium-GPTeacher-General-Instruct
[ "region:us" ]
2024-02-16T02:51:10+00:00
{"dataset_info": {"features": [{"name": "system", "dtype": "string"}, {"name": "instruction", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "lang", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 209818583, "num_examples": 178520}], "download_size": 99430551, "dataset_size": 209818583}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-17T04:16:54+00:00
6d48ce683e3a633e78fdda21183064824ac7f3fe
manishiitg/ai2_arc
[ "region:us" ]
2024-02-16T02:51:24+00:00
{"dataset_info": {"features": [{"name": "system", "dtype": "string"}, {"name": "instruction", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "lang", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 2318104, "num_examples": 4502}], "download_size": 674650, "dataset_size": 2318104}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-17T04:17:12+00:00
2619ffe280f4816149a86ff488984e347d1d7e16
jlbaker361/hacs-segment-pairs
[ "region:us" ]
2024-02-16T02:53:53+00:00
{"dataset_info": {"features": [{"name": "src_image", "dtype": "image"}, {"name": "src_pose", "dtype": "image"}, {"name": "target_image", "dtype": "image"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 991404.0, "num_examples": 4}], "download_size": 1000950, "dataset_size": 991404.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-16T04:25:38+00:00
b177233b46a174f19003ed1cbe1caf36541e9068
manishiitg/databricks-databricks-dolly-15k
[ "region:us" ]
2024-02-16T02:55:11+00:00
{"dataset_info": {"features": [{"name": "system", "dtype": "string"}, {"name": "instruction", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "lang", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 44799212, "num_examples": 29980}], "download_size": 20957197, "dataset_size": 44799212}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-17T04:16:45+00:00
90485c7ec0ee3d8278ab1dab86a3fcefc9616e3d
This is an LLM-rated version of **euclaise/reddit-instruct-curated**, which is already a good dataset imo. Only **post titles** and **comment texts** were rated, since post texts can be confusing due to edits and seemingly out-of-context information. First, **I filtered examples with a comment score below 250**. Of course this is not a very precise filter, as some pairs might reference other comments or simply be unhelpful yet upvoted thanks to the Reddit hivemind. Next, I sent the example pairs with a rating prompt to Senku-Q2-XS and collected the numeric ratings **(out of 10)**. Overall there aren't many low-rated examples. Here are the three "worst" examples: ![image/png](https://cdn-uploads.huggingface.co/production/uploads/6324eabf05bd8a54c6eb1650/lxj7BGeJXqgRwtx3UoPlU.png) There are only 66 examples rated below 6. An example of a highly upvoted but poorly rated pair: ![image/png](https://cdn-uploads.huggingface.co/production/uploads/6324eabf05bd8a54c6eb1650/u6wsjzeHNnN4OGPWplyXe.png) **Let me know if I messed anything up, I honestly still have no idea what I am doing.**
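As a minimal sketch of how you might load this set and keep only the higher-rated pairs: note that the column name `rating` below is an assumption for illustration, not a confirmed field name, so check the actual schema first.

```python
from datasets import load_dataset

# Load the rated pairs (sketch; adjust the split name if the repo differs)
ds = load_dataset("Ba2han/Reddit-instruct-curated_rated-1.2k", split="train")

# Keep only pairs the rater scored 6 or higher ("rating" is an assumed column name)
high_quality = ds.filter(lambda ex: ex["rating"] >= 6)
print(len(high_quality))
```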
Ba2han/Reddit-instruct-curated_rated-1.2k
[ "size_categories:1K<n<10K", "language:en", "license:mit", "region:us" ]
2024-02-16T03:14:47+00:00
{"language": ["en"], "license": "mit", "size_categories": ["1K<n<10K"]}
2024-02-16T03:50:26+00:00
88f10b1844a5d9330998736c30800510cb17a896
franlucc/starcoderbase-1b-completions-cheeky_typeinf_analysis
[ "region:us" ]
2024-02-16T03:21:14+00:00
{"dataset_info": {"features": [{"name": "generated", "dtype": "string"}, {"name": "solution", "dtype": "string"}, {"name": "hexsha", "dtype": "string"}, {"name": "prompt", "dtype": "string"}, {"name": "correctness", "dtype": "string"}, {"name": "id", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 27377274, "num_examples": 5313}], "download_size": 5382971, "dataset_size": 27377274}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-16T03:21:15+00:00
7cd7243e4f7c07090530db29083d02fc34217020
# Dataset Card for "Test_Dataset_1K-0216" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ouvic215/Test_Dataset_1K-0216
[ "region:us" ]
2024-02-16T03:22:22+00:00
{"dataset_info": {"features": [{"name": "mask_image", "dtype": "image"}, {"name": "text", "dtype": "string"}, {"name": "image", "dtype": "image"}], "splits": [{"name": "train", "num_bytes": 147332332.0, "num_examples": 1588}], "download_size": 146499523, "dataset_size": 147332332.0}}
2024-02-16T03:22:49+00:00
d5ea52d256f7444e4bd6ed5aa0715b2d397789b9
beta-reduction/webcrawl-202401
[ "license:cc-by-sa-3.0", "region:us" ]
2024-02-16T03:23:07+00:00
{"license": "cc-by-sa-3.0"}
2024-02-16T03:52:17+00:00
5f036a24efb58c8445c2b647dd24ff7757cf6340
wanz/jyshuju
[ "region:us" ]
2024-02-16T03:27:19+00:00
{}
2024-02-16T07:05:57+00:00
fcca893e56e3a1f25286b13ab93070a202a37abe
Tarmana/pendahuluan_jurnal_pertanian
[ "task_categories:summarization", "size_categories:n<1K", "language:id", "region:us" ]
2024-02-16T03:29:07+00:00
{"language": ["id"], "size_categories": ["n<1K"], "task_categories": ["summarization"]}
2024-02-16T03:30:21+00:00
9a72476c0dc833b829d1b99b521ffa99a096b811
# Dataset This repository contains the final dataset created using various resources. The primary datasets used for the construction of this final dataset are: - [Telugu NLP Dataset from Kaggle](https://www.kaggle.com/datasets/sudalairajkumar/telugu-nlp) - [Telugu ASR Corpus from HuggingFace](https://huggingface.co/datasets/parambharat/telugu_asr_corpus) - [Wikipedia Telugu Dataset from Wikimedia on HuggingFace](https://huggingface.co/datasets/wikimedia/wikipedia) These datasets have been combined to form a comprehensive resource for Telugu Natural Language Processing (NLP) tasks.
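As a minimal usage sketch, each source is exposed as its own configuration (`telugu_asr`, `telugu_nlp`, `wikipedia`, as listed in this repo's metadata), so you can load them separately:

```python
from datasets import load_dataset

# Load the Telugu Wikipedia configuration; it exposes a single "text" column
wiki_te = load_dataset("indiehackers/telugu_dataset", "wikipedia", split="train")
print(wiki_te[0]["text"][:200])
```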
indiehackers/telugu_dataset
[ "region:us" ]
2024-02-16T03:32:53+00:00
{"dataset_info": [{"config_name": "telugu_asr", "features": [{"name": "sentence", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 47887486, "num_examples": 209270}], "download_size": 20219871, "dataset_size": 47887486}, {"config_name": "telugu_nlp", "features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 387671180, "num_examples": 47415}], "download_size": 150012515, "dataset_size": 387671180}, {"config_name": "wikipedia", "features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 710613522, "num_examples": 87854}], "download_size": 209754217, "dataset_size": 710613522}], "configs": [{"config_name": "telugu_asr", "data_files": [{"split": "train", "path": "telugu_asr/train-*"}]}, {"config_name": "telugu_nlp", "data_files": [{"split": "train", "path": "telugu_nlp/train-*"}]}, {"config_name": "wikipedia", "data_files": [{"split": "train", "path": "wikipedia/train-*"}]}]}
2024-02-16T03:40:32+00:00
9e242c6c2d693b7159fb8db57178c780bc925077
trappy/katland_cat_XL
[ "region:us" ]
2024-02-16T03:33:37+00:00
{}
2024-02-16T03:34:28+00:00
0c28e236eb0101cbe1e3da3cbce930b01de669ba
TJohnson250/OGSDI
[ "license:apache-2.0", "region:us" ]
2024-02-16T03:34:48+00:00
{"license": "apache-2.0"}
2024-02-16T03:41:24+00:00
b3dd583ce90282a9084fb4e4b3b7ca8978cc9abb
# Dataset This repository contains the final dataset created using various resources. The primary datasets used for the construction of this final dataset are: - [Telugu NLP Dataset from Kaggle](https://www.kaggle.com/datasets/sudalairajkumar/telugu-nlp) - [Telugu ASR Corpus from HuggingFace](https://huggingface.co/datasets/parambharat/telugu_asr_corpus) - [Wikipedia Telugu Dataset from Wikimedia on HuggingFace](https://huggingface.co/datasets/wikimedia/wikipedia) These datasets have been combined to form a comprehensive resource for Telugu Natural Language Processing (NLP) tasks.
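For example, the ASR-derived configuration (`telugu_asr`, which exposes a `sentence` column per this repo's metadata) can be loaded on its own; this is a minimal sketch:

```python
from datasets import load_dataset

# Load only the ASR-derived sentences
asr_te = load_dataset("eswardivi/telugu_dataset", "telugu_asr", split="train")
print(asr_te[0]["sentence"])
```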
eswardivi/telugu_dataset
[ "region:us" ]
2024-02-16T03:35:51+00:00
{"dataset_info": [{"config_name": "telugu_asr", "features": [{"name": "sentence", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 47887486, "num_examples": 209270}], "download_size": 20219871, "dataset_size": 47887486}, {"config_name": "telugu_nlp", "features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 387671180, "num_examples": 47415}], "download_size": 150012515, "dataset_size": 387671180}, {"config_name": "wikipedia", "features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 710613522, "num_examples": 87854}], "download_size": 209754217, "dataset_size": 710613522}], "configs": [{"config_name": "telugu_asr", "data_files": [{"split": "train", "path": "telugu_asr/train-*"}]}, {"config_name": "telugu_nlp", "data_files": [{"split": "train", "path": "telugu_nlp/train-*"}]}, {"config_name": "wikipedia", "data_files": [{"split": "train", "path": "wikipedia/train-*"}]}]}
2024-02-16T03:39:52+00:00
659935c6f026666ee8238090a3697c9d39116260
Sophialucky/WheelScrewDemo
[ "region:us" ]
2024-02-16T03:46:25+00:00
{}
2024-02-17T10:23:37+00:00
3cc0ea4cbc9f46eb2f18299fcdd05fca063ca2ff
presencesw/hash_fid_vi_sample
[ "region:us" ]
2024-02-16T03:55:26+00:00
{"dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "answers", "sequence": "string"}, {"name": "ctxs", "sequence": "string"}], "splits": [{"name": "train", "num_bytes": 3494014, "num_examples": 1000}, {"name": "validation", "num_bytes": 3177477, "num_examples": 917}, {"name": "test", "num_bytes": 1260384, "num_examples": 362}], "download_size": 3583868, "dataset_size": 7931875}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}]}
2024-02-17T03:10:26+00:00
f998e7a82884ba1b24397a871f1a7d2f478459f7
H34lthy/Isotropy
[ "license:mit", "region:us" ]
2024-02-16T03:56:32+00:00
{"license": "mit"}
2024-02-16T03:57:57+00:00
467d4630cecf4064fa289da00cc4e033e4da6ed7
djvictordance/vocal
[ "license:openrail", "region:us" ]
2024-02-16T04:02:44+00:00
{"license": "openrail"}
2024-02-16T04:06:05+00:00
a5a244a457e122977da71affda04262c5d0aed09
justmalhar/aya_dataset_hindi
[ "region:us" ]
2024-02-16T04:09:08+00:00
{"dataset_info": {"features": [{"name": "inputs", "dtype": "string"}, {"name": "targets", "dtype": "string"}, {"name": "language", "dtype": "string"}, {"name": "language_code", "dtype": "string"}, {"name": "annotation_type", "dtype": "string"}, {"name": "user_id", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1453695.8054298195, "num_examples": 1155}, {"name": "test", "num_bytes": 0.0, "num_examples": 0}], "download_size": 599486, "dataset_size": 1453695.8054298195}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]}
2024-02-16T04:09:17+00:00
1053ec6c8bedc4810d2b2242881e8ad6cabe7b2a
benayas/banking_augmented_10pct_v0
[ "region:us" ]
2024-02-16T04:12:42+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "category", "dtype": "string"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 1018873, "num_examples": 10003}], "download_size": 411598, "dataset_size": 1018873}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-16T04:12:44+00:00
5190b4db7ab7037775447965088ae8f29928883a
karthikshf/cvparser
[ "region:us" ]
2024-02-16T04:22:02+00:00
{}
2024-02-16T04:22:49+00:00
c91543bdc6ca004c4f0bbcb04be03dcda09caafe
djvictordance/vocal2
[ "license:openrail", "region:us" ]
2024-02-16T04:22:04+00:00
{"license": "openrail"}
2024-02-16T04:23:51+00:00
91527921b78194abda879d03e23de72a1aa77233
ayrus08/medcodgoogle
[ "region:us" ]
2024-02-16T04:24:17+00:00
{}
2024-02-16T04:25:25+00:00
6f230bf6194f811930b4208f3a8f253125a236e1
Utsav2001/Context-Orca
[ "region:us" ]
2024-02-16T04:33:10+00:00
{}
2024-02-16T04:38:56+00:00
46cec9c2b00f0775d81f888c169e9de1ff003dd2
krishan-CSE/HatEval_Relabled_with_Emotion_Author
[ "license:apache-2.0", "region:us" ]
2024-02-16T04:35:31+00:00
{"license": "apache-2.0"}
2024-02-16T04:35:47+00:00
d061ff56c1a3cabdbd47c91630c947bebf1c5d9f
chansung/test-dataset-repo
[ "region:us" ]
2024-02-16T04:35:56+00:00
{"dataset_info": {"features": [{"name": "conversations", "list": [{"name": "assistant", "dtype": "string"}, {"name": "user", "dtype": "string"}]}], "splits": [{"name": "train", "num_bytes": 15024, "num_examples": 12}], "download_size": 11446, "dataset_size": 15024}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-16T05:11:55+00:00
d4614d53c5e02a57e720c36b1ed9b306980848b1
jlbaker361/cyberpunk-lite-500-cropped
[ "region:us" ]
2024-02-16T04:40:06+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "text", "dtype": "string"}, {"name": "frame", "dtype": "int64"}, {"name": "title", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 5417030.0, "num_examples": 24}], "download_size": 5421221, "dataset_size": 5417030.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-16T04:40:07+00:00
2a1420176a8573433736795f8ea07c7fa19f7003
karthikshf/lcv
[ "region:us" ]
2024-02-16T04:40:15+00:00
{}
2024-02-16T04:42:02+00:00
29369aa7ebfa0c6a5184d742039373e92c1d1fac
wisenut-nlp-team/poc_small_negative
[ "region:us" ]
2024-02-16T04:47:12+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "title", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "context", "sequence": "string"}, {"name": "answer", "sequence": "string"}, {"name": "original_answer", "sequence": "string"}, {"name": "similar_contexts", "sequence": "string"}], "splits": [{"name": "train", "num_bytes": 9186186206.087706, "num_examples": 503927}], "download_size": 3999833332, "dataset_size": 9186186206.087706}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-16T04:56:02+00:00
b9aab1ab5834df51364f0b5d112978cfaa479851
chunping-vi/pair_files
[ "region:us" ]
2024-02-16T04:48:02+00:00
{}
2024-02-16T08:38:44+00:00
b4e8f468d7764c4f03a4133a4b3c779ed0876ce2
jlbaker361/cyberpunk-lite-1000
[ "region:us" ]
2024-02-16T05:00:41+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "text", "dtype": "string"}, {"name": "frame", "dtype": "int64"}, {"name": "title", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 2642369.0, "num_examples": 12}], "download_size": 2647158, "dataset_size": 2642369.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-16T05:00:42+00:00
c52f633509b7efe3793c39b94f14499e9640f850
pradeep239/philp_plain_only5Years
[ "license:mit", "region:us" ]
2024-02-16T05:03:10+00:00
{"license": "mit", "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "ground_truth", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 377350598.0, "num_examples": 793}, {"name": "validation", "num_bytes": 43159301.0, "num_examples": 94}, {"name": "test", "num_bytes": 22114074.0, "num_examples": 47}], "download_size": 320085727, "dataset_size": 442623973.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}]}
2024-02-16T06:17:42+00:00
1464e237ae0454afff2ec5c754067b48eaba8c04
bala1524/drug-comb-data
[ "task_categories:question-answering", "size_categories:n<1K", "language:en", "license:apache-2.0", "biology", "medical", "region:us" ]
2024-02-16T05:17:14+00:00
{"language": ["en"], "license": "apache-2.0", "size_categories": ["n<1K"], "task_categories": ["question-answering"], "pretty_name": "drug comb", "tags": ["biology", "medical"]}
2024-02-16T05:20:12+00:00
cff0a10b314f47a52f9646ce3c38ec3b7d0b4852
# Dataset Card for Evaluation run of NLUHOPOE/test-case-0 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [NLUHOPOE/test-case-0](https://huggingface.co/NLUHOPOE/test-case-0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_NLUHOPOE__test-case-0", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-16T05:25:06.093843](https://huggingface.co/datasets/open-llm-leaderboard/details_NLUHOPOE__test-case-0/blob/main/results_2024-02-16T05-25-06.093843.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5791278236658676, "acc_stderr": 0.033494817808173614, "acc_norm": 0.5837595891503912, "acc_norm_stderr": 0.03419368461778056, "mc1": 0.3268053855569155, "mc1_stderr": 0.01641987473113503, "mc2": 0.4880155663864428, "mc2_stderr": 0.015371746911854285 }, "harness|arc:challenge|25": { "acc": 0.5349829351535836, "acc_stderr": 0.01457558392201967, "acc_norm": 0.5750853242320819, "acc_norm_stderr": 0.014445698968520769 }, "harness|hellaswag|10": { "acc": 0.5997809201354312, "acc_stderr": 0.004889413126208774, "acc_norm": 0.796355307707628, "acc_norm_stderr": 0.004018847286468061 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4888888888888889, "acc_stderr": 0.04318275491977976, "acc_norm": 0.4888888888888889, "acc_norm_stderr": 0.04318275491977976 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5855263157894737, "acc_stderr": 0.04008973785779206, "acc_norm": 0.5855263157894737, "acc_norm_stderr": 0.04008973785779206 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6490566037735849, "acc_stderr": 0.02937364625323469, "acc_norm": 0.6490566037735849, "acc_norm_stderr": 0.02937364625323469 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6736111111111112, "acc_stderr": 0.03921067198982266, "acc_norm": 0.6736111111111112, "acc_norm_stderr": 0.03921067198982266 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.4, "acc_stderr": 
0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5086705202312138, "acc_stderr": 0.03811890988940412, "acc_norm": 0.5086705202312138, "acc_norm_stderr": 0.03811890988940412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3137254901960784, "acc_stderr": 0.04617034827006718, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.04617034827006718 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.46382978723404256, "acc_stderr": 0.032600385118357715, "acc_norm": 0.46382978723404256, "acc_norm_stderr": 0.032600385118357715 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.39473684210526316, "acc_stderr": 0.045981880578165414, "acc_norm": 0.39473684210526316, "acc_norm_stderr": 0.045981880578165414 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5448275862068965, "acc_stderr": 0.04149886942192117, "acc_norm": 0.5448275862068965, "acc_norm_stderr": 0.04149886942192117 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3783068783068783, "acc_stderr": 0.024976954053155254, "acc_norm": 0.3783068783068783, "acc_norm_stderr": 0.024976954053155254 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3888888888888889, "acc_stderr": 0.04360314860077459, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.04360314860077459 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7129032258064516, "acc_stderr": 0.025736542745594528, "acc_norm": 0.7129032258064516, "acc_norm_stderr": 0.025736542745594528 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.43842364532019706, "acc_stderr": 0.03491207857486518, "acc_norm": 0.43842364532019706, "acc_norm_stderr": 0.03491207857486518 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.64, "acc_stderr": 0.048241815132442176, "acc_norm": 0.64, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7878787878787878, "acc_stderr": 0.03192271569548301, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.03192271569548301 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7272727272727273, "acc_stderr": 0.03173071239071724, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.03173071239071724 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8238341968911918, "acc_stderr": 0.027493504244548047, "acc_norm": 0.8238341968911918, "acc_norm_stderr": 0.027493504244548047 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5923076923076923, "acc_stderr": 0.02491524398598785, "acc_norm": 0.5923076923076923, "acc_norm_stderr": 0.02491524398598785 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32592592592592595, "acc_stderr": 0.02857834836547308, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.02857834836547308 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6554621848739496, "acc_stderr": 0.030868682604121626, "acc_norm": 0.6554621848739496, "acc_norm_stderr": 0.030868682604121626 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2913907284768212, "acc_stderr": 0.03710185726119994, "acc_norm": 0.2913907284768212, "acc_norm_stderr": 
0.03710185726119994 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7596330275229358, "acc_stderr": 0.01832060732096407, "acc_norm": 0.7596330275229358, "acc_norm_stderr": 0.01832060732096407 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4722222222222222, "acc_stderr": 0.0340470532865388, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7892156862745098, "acc_stderr": 0.028626547912437378, "acc_norm": 0.7892156862745098, "acc_norm_stderr": 0.028626547912437378 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7383966244725738, "acc_stderr": 0.028609516716994934, "acc_norm": 0.7383966244725738, "acc_norm_stderr": 0.028609516716994934 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6367713004484304, "acc_stderr": 0.032277904428505, "acc_norm": 0.6367713004484304, "acc_norm_stderr": 0.032277904428505 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6564885496183206, "acc_stderr": 0.041649760719448786, "acc_norm": 0.6564885496183206, "acc_norm_stderr": 0.041649760719448786 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7272727272727273, "acc_stderr": 0.04065578140908705, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04065578140908705 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6759259259259259, "acc_stderr": 0.045245960070300476, "acc_norm": 0.6759259259259259, "acc_norm_stderr": 0.045245960070300476 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6748466257668712, "acc_stderr": 0.036803503712864616, "acc_norm": 0.6748466257668712, "acc_norm_stderr": 0.036803503712864616 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4375, "acc_stderr": 0.04708567521880525, "acc_norm": 0.4375, "acc_norm_stderr": 0.04708567521880525 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.04245022486384495, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.04245022486384495 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8376068376068376, "acc_stderr": 0.024161618127987745, "acc_norm": 0.8376068376068376, "acc_norm_stderr": 0.024161618127987745 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.768837803320562, "acc_stderr": 0.015075523238101074, "acc_norm": 0.768837803320562, "acc_norm_stderr": 0.015075523238101074 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6213872832369942, "acc_stderr": 0.02611374936131034, "acc_norm": 0.6213872832369942, "acc_norm_stderr": 0.02611374936131034 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2569832402234637, "acc_stderr": 0.01461446582196633, "acc_norm": 0.2569832402234637, "acc_norm_stderr": 0.01461446582196633 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6503267973856209, "acc_stderr": 0.027305308076274695, "acc_norm": 0.6503267973856209, "acc_norm_stderr": 0.027305308076274695 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.662379421221865, "acc_stderr": 0.026858825879488544, "acc_norm": 0.662379421221865, "acc_norm_stderr": 0.026858825879488544 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6388888888888888, "acc_stderr": 0.026725868809100786, "acc_norm": 0.6388888888888888, "acc_norm_stderr": 0.026725868809100786 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.42907801418439717, "acc_stderr": 0.029525914302558555, "acc_norm": 
0.42907801418439717, "acc_norm_stderr": 0.029525914302558555 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3924380704041721, "acc_stderr": 0.01247124366922911, "acc_norm": 0.3924380704041721, "acc_norm_stderr": 0.01247124366922911 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5882352941176471, "acc_stderr": 0.02989616303312547, "acc_norm": 0.5882352941176471, "acc_norm_stderr": 0.02989616303312547 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5686274509803921, "acc_stderr": 0.02003639376835263, "acc_norm": 0.5686274509803921, "acc_norm_stderr": 0.02003639376835263 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5909090909090909, "acc_stderr": 0.04709306978661895, "acc_norm": 0.5909090909090909, "acc_norm_stderr": 0.04709306978661895 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6571428571428571, "acc_stderr": 0.030387262919547735, "acc_norm": 0.6571428571428571, "acc_norm_stderr": 0.030387262919547735 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8258706467661692, "acc_stderr": 0.026814951200421603, "acc_norm": 0.8258706467661692, "acc_norm_stderr": 0.026814951200421603 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-virology|5": { "acc": 0.4819277108433735, "acc_stderr": 0.038899512528272166, "acc_norm": 0.4819277108433735, "acc_norm_stderr": 0.038899512528272166 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8187134502923976, "acc_stderr": 0.029547741687640038, "acc_norm": 0.8187134502923976, "acc_norm_stderr": 0.029547741687640038 }, "harness|truthfulqa:mc|0": { "mc1": 0.3268053855569155, "mc1_stderr": 0.01641987473113503, "mc2": 0.4880155663864428, "mc2_stderr": 0.015371746911854285 }, "harness|winogrande|5": { "acc": 0.7782162588792423, "acc_stderr": 0.011676109244497813 }, "harness|gsm8k|5": { "acc": 0.3434420015163002, "acc_stderr": 0.013079933811800311 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_NLUHOPOE__test-case-0
[ "region:us" ]
2024-02-16T05:27:27+00:00
{"pretty_name": "Evaluation run of NLUHOPOE/test-case-0", "dataset_summary": "Dataset automatically created during the evaluation run of model [NLUHOPOE/test-case-0](https://huggingface.co/NLUHOPOE/test-case-0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_NLUHOPOE__test-case-0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-16T05:25:06.093843](https://huggingface.co/datasets/open-llm-leaderboard/details_NLUHOPOE__test-case-0/blob/main/results_2024-02-16T05-25-06.093843.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5791278236658676,\n \"acc_stderr\": 0.033494817808173614,\n \"acc_norm\": 0.5837595891503912,\n \"acc_norm_stderr\": 0.03419368461778056,\n \"mc1\": 0.3268053855569155,\n \"mc1_stderr\": 0.01641987473113503,\n \"mc2\": 0.4880155663864428,\n \"mc2_stderr\": 0.015371746911854285\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5349829351535836,\n \"acc_stderr\": 0.01457558392201967,\n \"acc_norm\": 0.5750853242320819,\n \"acc_norm_stderr\": 0.014445698968520769\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5997809201354312,\n \"acc_stderr\": 0.004889413126208774,\n \"acc_norm\": 0.796355307707628,\n \"acc_norm_stderr\": 0.004018847286468061\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4888888888888889,\n \"acc_stderr\": 0.04318275491977976,\n \"acc_norm\": 0.4888888888888889,\n \"acc_norm_stderr\": 0.04318275491977976\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5855263157894737,\n \"acc_stderr\": 0.04008973785779206,\n \"acc_norm\": 0.5855263157894737,\n \"acc_norm_stderr\": 0.04008973785779206\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6490566037735849,\n \"acc_stderr\": 0.02937364625323469,\n \"acc_norm\": 0.6490566037735849,\n \"acc_norm_stderr\": 0.02937364625323469\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6736111111111112,\n \"acc_stderr\": 0.03921067198982266,\n \"acc_norm\": 0.6736111111111112,\n \"acc_norm_stderr\": 0.03921067198982266\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 
0.04988876515698589\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5086705202312138,\n \"acc_stderr\": 0.03811890988940412,\n \"acc_norm\": 0.5086705202312138,\n \"acc_norm_stderr\": 0.03811890988940412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3137254901960784,\n \"acc_stderr\": 0.04617034827006718,\n \"acc_norm\": 0.3137254901960784,\n \"acc_norm_stderr\": 0.04617034827006718\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.46382978723404256,\n \"acc_stderr\": 0.032600385118357715,\n \"acc_norm\": 0.46382978723404256,\n \"acc_norm_stderr\": 0.032600385118357715\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.39473684210526316,\n \"acc_stderr\": 0.045981880578165414,\n \"acc_norm\": 0.39473684210526316,\n \"acc_norm_stderr\": 0.045981880578165414\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3783068783068783,\n \"acc_stderr\": 0.024976954053155254,\n \"acc_norm\": 0.3783068783068783,\n \"acc_norm_stderr\": 0.024976954053155254\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.04360314860077459,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.04360314860077459\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7129032258064516,\n \"acc_stderr\": 0.025736542745594528,\n \"acc_norm\": 0.7129032258064516,\n \"acc_norm_stderr\": 0.025736542745594528\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.43842364532019706,\n \"acc_stderr\": 0.03491207857486518,\n \"acc_norm\": 0.43842364532019706,\n \"acc_norm_stderr\": 0.03491207857486518\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.03192271569548301,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.03192271569548301\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.03173071239071724,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.03173071239071724\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8238341968911918,\n \"acc_stderr\": 0.027493504244548047,\n \"acc_norm\": 0.8238341968911918,\n \"acc_norm_stderr\": 0.027493504244548047\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5923076923076923,\n \"acc_stderr\": 
0.02491524398598785,\n \"acc_norm\": 0.5923076923076923,\n \"acc_norm_stderr\": 0.02491524398598785\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32592592592592595,\n \"acc_stderr\": 0.02857834836547308,\n \"acc_norm\": 0.32592592592592595,\n \"acc_norm_stderr\": 0.02857834836547308\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6554621848739496,\n \"acc_stderr\": 0.030868682604121626,\n \"acc_norm\": 0.6554621848739496,\n \"acc_norm_stderr\": 0.030868682604121626\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2913907284768212,\n \"acc_stderr\": 0.03710185726119994,\n \"acc_norm\": 0.2913907284768212,\n \"acc_norm_stderr\": 0.03710185726119994\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7596330275229358,\n \"acc_stderr\": 0.01832060732096407,\n \"acc_norm\": 0.7596330275229358,\n \"acc_norm_stderr\": 0.01832060732096407\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4722222222222222,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\": 0.4722222222222222,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7892156862745098,\n \"acc_stderr\": 0.028626547912437378,\n \"acc_norm\": 0.7892156862745098,\n \"acc_norm_stderr\": 0.028626547912437378\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7383966244725738,\n \"acc_stderr\": 0.028609516716994934,\n \"acc_norm\": 0.7383966244725738,\n \"acc_norm_stderr\": 0.028609516716994934\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6367713004484304,\n \"acc_stderr\": 0.032277904428505,\n \"acc_norm\": 0.6367713004484304,\n \"acc_norm_stderr\": 0.032277904428505\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6564885496183206,\n \"acc_stderr\": 0.041649760719448786,\n \"acc_norm\": 0.6564885496183206,\n \"acc_norm_stderr\": 0.041649760719448786\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.04065578140908705,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.04065578140908705\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6759259259259259,\n \"acc_stderr\": 0.045245960070300476,\n \"acc_norm\": 0.6759259259259259,\n \"acc_norm_stderr\": 0.045245960070300476\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6748466257668712,\n \"acc_stderr\": 0.036803503712864616,\n \"acc_norm\": 0.6748466257668712,\n \"acc_norm_stderr\": 0.036803503712864616\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4375,\n \"acc_stderr\": 0.04708567521880525,\n \"acc_norm\": 0.4375,\n \"acc_norm_stderr\": 0.04708567521880525\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8376068376068376,\n \"acc_stderr\": 0.024161618127987745,\n \"acc_norm\": 0.8376068376068376,\n \"acc_norm_stderr\": 0.024161618127987745\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.768837803320562,\n \"acc_stderr\": 0.015075523238101074,\n \"acc_norm\": 0.768837803320562,\n \"acc_norm_stderr\": 0.015075523238101074\n 
},\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6213872832369942,\n \"acc_stderr\": 0.02611374936131034,\n \"acc_norm\": 0.6213872832369942,\n \"acc_norm_stderr\": 0.02611374936131034\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2569832402234637,\n \"acc_stderr\": 0.01461446582196633,\n \"acc_norm\": 0.2569832402234637,\n \"acc_norm_stderr\": 0.01461446582196633\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6503267973856209,\n \"acc_stderr\": 0.027305308076274695,\n \"acc_norm\": 0.6503267973856209,\n \"acc_norm_stderr\": 0.027305308076274695\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.662379421221865,\n \"acc_stderr\": 0.026858825879488544,\n \"acc_norm\": 0.662379421221865,\n \"acc_norm_stderr\": 0.026858825879488544\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6388888888888888,\n \"acc_stderr\": 0.026725868809100786,\n \"acc_norm\": 0.6388888888888888,\n \"acc_norm_stderr\": 0.026725868809100786\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.42907801418439717,\n \"acc_stderr\": 0.029525914302558555,\n \"acc_norm\": 0.42907801418439717,\n \"acc_norm_stderr\": 0.029525914302558555\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3924380704041721,\n \"acc_stderr\": 0.01247124366922911,\n \"acc_norm\": 0.3924380704041721,\n \"acc_norm_stderr\": 0.01247124366922911\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5882352941176471,\n \"acc_stderr\": 0.02989616303312547,\n \"acc_norm\": 0.5882352941176471,\n \"acc_norm_stderr\": 0.02989616303312547\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5686274509803921,\n \"acc_stderr\": 0.02003639376835263,\n \"acc_norm\": 0.5686274509803921,\n \"acc_norm_stderr\": 0.02003639376835263\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5909090909090909,\n \"acc_stderr\": 0.04709306978661895,\n \"acc_norm\": 0.5909090909090909,\n \"acc_norm_stderr\": 0.04709306978661895\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6571428571428571,\n \"acc_stderr\": 0.030387262919547735,\n \"acc_norm\": 0.6571428571428571,\n \"acc_norm_stderr\": 0.030387262919547735\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8258706467661692,\n \"acc_stderr\": 0.026814951200421603,\n \"acc_norm\": 0.8258706467661692,\n \"acc_norm_stderr\": 0.026814951200421603\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4819277108433735,\n \"acc_stderr\": 0.038899512528272166,\n \"acc_norm\": 0.4819277108433735,\n \"acc_norm_stderr\": 0.038899512528272166\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8187134502923976,\n \"acc_stderr\": 0.029547741687640038,\n \"acc_norm\": 0.8187134502923976,\n \"acc_norm_stderr\": 0.029547741687640038\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3268053855569155,\n \"mc1_stderr\": 0.01641987473113503,\n \"mc2\": 0.4880155663864428,\n \"mc2_stderr\": 0.015371746911854285\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7782162588792423,\n \"acc_stderr\": 0.011676109244497813\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3434420015163002,\n \"acc_stderr\": 0.013079933811800311\n }\n}\n```", "repo_url": "https://huggingface.co/NLUHOPOE/test-case-0", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|arc:challenge|25_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|gsm8k|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hellaswag|10_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T05-25-06.093843.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T05-25-06.093843.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T05-25-06.093843.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T05-25-06.093843.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T05-25-06.093843.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T05-25-06.093843.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["**/details_harness|winogrande|5_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-16T05-25-06.093843.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_16T05_25_06.093843", "path": ["results_2024-02-16T05-25-06.093843.parquet"]}, {"split": "latest", "path": 
["results_2024-02-16T05-25-06.093843.parquet"]}]}]}
2024-02-16T05:27:47+00:00
4a2e29cd0c17e294217b49b3abdf9433e6c55473
Jonakhawbs/linkedin_dataset_hire
[ "region:us" ]
2024-02-16T05:30:22+00:00
{}
2024-02-16T05:33:27+00:00
2a63d669ac8c2994f6e618528dec466208041853
This dataset consists of ColBERTv2.0 document vectors for the entire TREC-COVID dataset from BeIR: 128 dimensions per token, with 180 tokens for each of the 171,332 documents. The dataset was created using an A100-40GB GPU sponsored by Qdrant. The code used to create these vectors is here: https://colab.research.google.com/drive/1hEhyleSrBz_mPyQJnRc0MwBenDuX1ahY?usp=sharing
This dataset was created for indexing experiments by Qdrant.
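For reference, here is a minimal sketch of how the vectors could be inspected with the `datasets` library (the feature name and split come from the metadata below; the per-document shape is an assumption based on the description above):

```python
from datasets import load_dataset

# Minimal sketch: stream the records instead of downloading all parquet shards,
# then look at the token-vector matrix of the first document.
ds = load_dataset("Qdrant/ColBERT-TREC-COVID", split="train", streaming=True)

first = next(iter(ds))
vectors = first["documents"]           # list of per-token embeddings for one document
print(len(vectors), len(vectors[0]))   # expected: ~180 tokens x 128 dimensions
```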
Qdrant/ColBERT-TREC-COVID
[ "task_categories:feature-extraction", "size_categories:100K<n<1M", "language:en", "license:mit", "medical", "region:us" ]
2024-02-16T05:35:27+00:00
{"language": ["en"], "license": "mit", "size_categories": ["100K<n<1M"], "task_categories": ["feature-extraction"], "pretty_name": "ColBERT TREC COVID", "dataset_info": {"features": [{"name": "documents", "sequence": {"sequence": "float16"}}], "splits": [{"name": "train", "num_bytes": 8019022928, "num_examples": 171332}], "download_size": 5775769873, "dataset_size": 8019022928}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "tags": ["medical"]}
2024-02-16T06:10:04+00:00
106f24d6f230aea99516287a1e136e9e29e4945d
benayas/banking_augmented_20pct_v0
[ "region:us" ]
2024-02-16T05:41:04+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "category", "dtype": "string"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 1018827, "num_examples": 10003}], "download_size": 425615, "dataset_size": 1018827}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-16T05:41:07+00:00
3835f192b287746f00fc85ffff28b1b097a4cc52
dooooosooooo/tofu_character_ai
[ "task_categories:question-answering", "language:ko", "license:unknown", "region:us" ]
2024-02-16T06:10:26+00:00
{"language": ["ko"], "license": "unknown", "task_categories": ["question-answering"]}
2024-02-16T06:22:59+00:00
e66fbe25b1e23cf967f1d2832a4e06b7717c808f
JJini/honsol
[ "region:us" ]
2024-02-16T06:14:25+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 6682255, "num_examples": 12880}], "download_size": 1056734, "dataset_size": 6682255}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-16T06:14:35+00:00
9824f1afb87451b3837e585d85eb93896dfe1107
## Dataset Details

### Dataset Description

<!-- Provide a longer summary of what this dataset is. -->

- **Curated by:** Matichon Maneegard
- **Shared by [optional]:** Matichon Maneegard
- **Language(s) (NLP):** Thai
- **License:** apache-2.0

### Dataset Sources [optional]

<!-- Provide the basic links for the dataset. -->

The dataset is entirely synthetic. It does not contain real information or pertain to any specific person.

## Uses

<!-- Address questions around how the dataset is intended to be used. -->

### Direct Use

<!-- This section describes suitable use cases for the dataset. -->

Intended for training OCR or multimodal models.

## Dataset Structure

<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->

This dataset contains 98 x 6 = 588 image samples, and the labels cover 98 records; each copy of a record represents a different scenario. The 'train.csv' file contains 11 attributes:

```File_Index, first_name_th, first_name_en, last_name_en, birth_date_th, birth_date_en, religion, first_address_th, second_address_th, third_address_th, forth_address_th```

The 'File_Index' corresponds to the number of the image within each scenario folder, so '/Scenario_1/file_1.png' has the same attributes as '/Scenario_2/file_1.png'. A minimal loading sketch is included after the Contact section below.

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.

### Contact

Twitter : [Mati](https://twitter.com/KMatiDev1)

E-mail : [email protected]

VulturePrime : [VulturePrime](https://vultureprime.com)

Float16 : [Float16.cloud](https://float16.cloud)
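As noted above, here is a minimal sketch of how the labels could be joined to the scenario images (column and file names are taken from the description; the exact repository layout is an assumption):

```python
import pandas as pd

# Minimal sketch, assuming train.csv holds the labels and the images live in
# Scenario_1 ... Scenario_6 folders as described in the Dataset Structure section.
labels = pd.read_csv("train.csv")

file_index = 1
row = labels.loc[labels["File_Index"] == file_index].iloc[0]

# The same label row applies to this record's rendering in every scenario.
image_paths = [f"Scenario_{s}/file_{file_index}.png" for s in range(1, 7)]
print(row["first_name_en"], row["birth_date_en"], image_paths)
```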
matichon/ThaiIDCardSynt
[ "task_categories:image-to-text", "size_categories:n<1K", "language:th", "license:apache-2.0", "region:us" ]
2024-02-16T06:18:15+00:00
{"language": ["th"], "license": "apache-2.0", "size_categories": ["n<1K"], "task_categories": ["image-to-text"]}
2024-02-16T07:35:32+00:00
d36778cf817b687b55988c45dfc662dd4c8b6c23
taesiri/simple_fsm_bench_long_text
[ "region:us" ]
2024-02-16T06:20:49+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "fsm_json", "dtype": "string"}, {"name": "long_string", "dtype": "string"}, {"name": "final_acceptance_status", "dtype": "string"}, {"name": "acceptance_status", "sequence": "string"}, {"name": "char_list", "sequence": "string"}, {"name": "state_list", "sequence": "string"}, {"name": "final_state", "dtype": "string"}, {"name": "difficulty_level", "dtype": "int64"}, {"name": "dot", "dtype": "string"}, {"name": "transition_matrix", "dtype": "string"}], "splits": [{"name": "validation", "num_bytes": 6435777, "num_examples": 1000}], "download_size": 1029673, "dataset_size": 6435777}, "configs": [{"config_name": "default", "data_files": [{"split": "validation", "path": "data/validation-*"}]}]}
2024-02-16T06:20:50+00:00
7fa3bcad12f32cbbbb10c60a67ae5aae1376df59
ammaralam/guanaco-llama2-1k
[ "region:us" ]
2024-02-16T06:22:34+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1654448, "num_examples": 1000}], "download_size": 966692, "dataset_size": 1654448}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-16T06:22:36+00:00
048fc1a5f3f8cf3f8d050f1e727bce1233b32f12
# Dataset Card for Evaluation run of Undi95/PsyMedRP-v1-20B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Undi95/PsyMedRP-v1-20B](https://huggingface.co/Undi95/PsyMedRP-v1-20B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Undi95__PsyMedRP-v1-20B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-16T06:33:57.302712](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__PsyMedRP-v1-20B/blob/main/results_2024-02-16T06-33-57.302712.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5647260784625223, "acc_stderr": 0.033553791007284096, "acc_norm": 0.5721079188379258, "acc_norm_stderr": 0.03429829853750649, "mc1": 0.379436964504284, "mc1_stderr": 0.016987039266142985, "mc2": 0.5444967551355537, "mc2_stderr": 0.015846880267326138 }, "harness|arc:challenge|25": { "acc": 0.5861774744027304, "acc_stderr": 0.014392730009221009, "acc_norm": 0.6049488054607508, "acc_norm_stderr": 0.01428589829293817 }, "harness|hellaswag|10": { "acc": 0.6552479585739892, "acc_stderr": 0.004743160034271149, "acc_norm": 0.8393746265684127, "acc_norm_stderr": 0.0036643462998943955 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5037037037037037, "acc_stderr": 0.04319223625811331, "acc_norm": 0.5037037037037037, "acc_norm_stderr": 0.04319223625811331 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5394736842105263, "acc_stderr": 0.04056242252249034, "acc_norm": 0.5394736842105263, "acc_norm_stderr": 0.04056242252249034 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5924528301886792, "acc_stderr": 0.030242233800854494, "acc_norm": 0.5924528301886792, "acc_norm_stderr": 0.030242233800854494 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5972222222222222, "acc_stderr": 0.04101405519842426, "acc_norm": 0.5972222222222222, "acc_norm_stderr": 0.04101405519842426 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, 
"acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5086705202312138, "acc_stderr": 0.038118909889404105, "acc_norm": 0.5086705202312138, "acc_norm_stderr": 0.038118909889404105 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3137254901960784, "acc_stderr": 0.04617034827006716, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.04617034827006716 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.66, "acc_stderr": 0.047609522856952365, "acc_norm": 0.66, "acc_norm_stderr": 0.047609522856952365 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.46808510638297873, "acc_stderr": 0.03261936918467381, "acc_norm": 0.46808510638297873, "acc_norm_stderr": 0.03261936918467381 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2543859649122807, "acc_stderr": 0.04096985139843671, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.04096985139843671 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5310344827586206, "acc_stderr": 0.04158632762097828, "acc_norm": 0.5310344827586206, "acc_norm_stderr": 0.04158632762097828 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3412698412698413, "acc_stderr": 0.02441923496681907, "acc_norm": 0.3412698412698413, "acc_norm_stderr": 0.02441923496681907 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3412698412698413, "acc_stderr": 0.04240799327574924, "acc_norm": 0.3412698412698413, "acc_norm_stderr": 0.04240799327574924 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6741935483870968, "acc_stderr": 0.026662010578567107, "acc_norm": 0.6741935483870968, "acc_norm_stderr": 0.026662010578567107 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.458128078817734, "acc_stderr": 0.03505630140785741, "acc_norm": 0.458128078817734, "acc_norm_stderr": 0.03505630140785741 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7090909090909091, "acc_stderr": 0.03546563019624336, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 0.03546563019624336 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7222222222222222, "acc_stderr": 0.031911782267135466, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.031911782267135466 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7979274611398963, "acc_stderr": 0.028979089794296732, "acc_norm": 0.7979274611398963, "acc_norm_stderr": 0.028979089794296732 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5538461538461539, "acc_stderr": 0.02520357177302833, "acc_norm": 0.5538461538461539, "acc_norm_stderr": 0.02520357177302833 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.028317533496066482, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.028317533496066482 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6050420168067226, "acc_stderr": 0.031753678460966266, "acc_norm": 0.6050420168067226, "acc_norm_stderr": 0.031753678460966266 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 
0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7211009174311926, "acc_stderr": 0.0192274688764635, "acc_norm": 0.7211009174311926, "acc_norm_stderr": 0.0192274688764635 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4444444444444444, "acc_stderr": 0.03388857118502326, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.03388857118502326 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7990196078431373, "acc_stderr": 0.028125972265654362, "acc_norm": 0.7990196078431373, "acc_norm_stderr": 0.028125972265654362 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7468354430379747, "acc_stderr": 0.02830465794303529, "acc_norm": 0.7468354430379747, "acc_norm_stderr": 0.02830465794303529 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.03138147637575499, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.03138147637575499 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.648854961832061, "acc_stderr": 0.04186445163013751, "acc_norm": 0.648854961832061, "acc_norm_stderr": 0.04186445163013751 }, "harness|hendrycksTest-international_law|5": { "acc": 0.768595041322314, "acc_stderr": 0.03849856098794089, "acc_norm": 0.768595041322314, "acc_norm_stderr": 0.03849856098794089 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7129629629629629, "acc_stderr": 0.043733130409147614, "acc_norm": 0.7129629629629629, "acc_norm_stderr": 0.043733130409147614 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6932515337423313, "acc_stderr": 0.03623089915724147, "acc_norm": 0.6932515337423313, "acc_norm_stderr": 0.03623089915724147 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.30357142857142855, "acc_stderr": 0.04364226155841044, "acc_norm": 0.30357142857142855, "acc_norm_stderr": 0.04364226155841044 }, "harness|hendrycksTest-management|5": { "acc": 0.6504854368932039, "acc_stderr": 0.047211885060971716, "acc_norm": 0.6504854368932039, "acc_norm_stderr": 0.047211885060971716 }, "harness|hendrycksTest-marketing|5": { "acc": 0.811965811965812, "acc_stderr": 0.02559819368665225, "acc_norm": 0.811965811965812, "acc_norm_stderr": 0.02559819368665225 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.735632183908046, "acc_stderr": 0.015769984840690525, "acc_norm": 0.735632183908046, "acc_norm_stderr": 0.015769984840690525 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6473988439306358, "acc_stderr": 0.025722802200895803, "acc_norm": 0.6473988439306358, "acc_norm_stderr": 0.025722802200895803 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3865921787709497, "acc_stderr": 0.016286674879101022, "acc_norm": 0.3865921787709497, "acc_norm_stderr": 0.016286674879101022 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.630718954248366, "acc_stderr": 0.027634176689602663, "acc_norm": 0.630718954248366, "acc_norm_stderr": 0.027634176689602663 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6784565916398714, "acc_stderr": 0.026527724079528872, "acc_norm": 0.6784565916398714, "acc_norm_stderr": 0.026527724079528872 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6635802469135802, "acc_stderr": 0.026289734945952922, "acc_norm": 0.6635802469135802, "acc_norm_stderr": 0.026289734945952922 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.44680851063829785, "acc_stderr": 
0.029658235097666904, "acc_norm": 0.44680851063829785, "acc_norm_stderr": 0.029658235097666904 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4511082138200782, "acc_stderr": 0.012709037347346233, "acc_norm": 0.4511082138200782, "acc_norm_stderr": 0.012709037347346233 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5698529411764706, "acc_stderr": 0.030074971917302875, "acc_norm": 0.5698529411764706, "acc_norm_stderr": 0.030074971917302875 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6029411764705882, "acc_stderr": 0.019794488900024117, "acc_norm": 0.6029411764705882, "acc_norm_stderr": 0.019794488900024117 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6181818181818182, "acc_stderr": 0.046534298079135075, "acc_norm": 0.6181818181818182, "acc_norm_stderr": 0.046534298079135075 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.636734693877551, "acc_stderr": 0.030789051139030806, "acc_norm": 0.636734693877551, "acc_norm_stderr": 0.030789051139030806 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7562189054726368, "acc_stderr": 0.030360490154014635, "acc_norm": 0.7562189054726368, "acc_norm_stderr": 0.030360490154014635 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.03487350880197769, "acc_norm": 0.86, "acc_norm_stderr": 0.03487350880197769 }, "harness|hendrycksTest-virology|5": { "acc": 0.41566265060240964, "acc_stderr": 0.03836722176598052, "acc_norm": 0.41566265060240964, "acc_norm_stderr": 0.03836722176598052 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7543859649122807, "acc_stderr": 0.03301405946987249, "acc_norm": 0.7543859649122807, "acc_norm_stderr": 0.03301405946987249 }, "harness|truthfulqa:mc|0": { "mc1": 0.379436964504284, "mc1_stderr": 0.016987039266142985, "mc2": 0.5444967551355537, "mc2_stderr": 0.015846880267326138 }, "harness|winogrande|5": { "acc": 0.7482241515390686, "acc_stderr": 0.012198489100259785 }, "harness|gsm8k|5": { "acc": 0.14859742228961334, "acc_stderr": 0.009797503180527883 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_Undi95__PsyMedRP-v1-20B
[ "region:us" ]
2024-02-16T06:36:16+00:00
{"pretty_name": "Evaluation run of Undi95/PsyMedRP-v1-20B", "dataset_summary": "Dataset automatically created during the evaluation run of model [Undi95/PsyMedRP-v1-20B](https://huggingface.co/Undi95/PsyMedRP-v1-20B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Undi95__PsyMedRP-v1-20B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-16T06:33:57.302712](https://huggingface.co/datasets/open-llm-leaderboard/details_Undi95__PsyMedRP-v1-20B/blob/main/results_2024-02-16T06-33-57.302712.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5647260784625223,\n \"acc_stderr\": 0.033553791007284096,\n \"acc_norm\": 0.5721079188379258,\n \"acc_norm_stderr\": 0.03429829853750649,\n \"mc1\": 0.379436964504284,\n \"mc1_stderr\": 0.016987039266142985,\n \"mc2\": 0.5444967551355537,\n \"mc2_stderr\": 0.015846880267326138\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5861774744027304,\n \"acc_stderr\": 0.014392730009221009,\n \"acc_norm\": 0.6049488054607508,\n \"acc_norm_stderr\": 0.01428589829293817\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6552479585739892,\n \"acc_stderr\": 0.004743160034271149,\n \"acc_norm\": 0.8393746265684127,\n \"acc_norm_stderr\": 0.0036643462998943955\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5037037037037037,\n \"acc_stderr\": 0.04319223625811331,\n \"acc_norm\": 0.5037037037037037,\n \"acc_norm_stderr\": 0.04319223625811331\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5394736842105263,\n \"acc_stderr\": 0.04056242252249034,\n \"acc_norm\": 0.5394736842105263,\n \"acc_norm_stderr\": 0.04056242252249034\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5924528301886792,\n \"acc_stderr\": 0.030242233800854494,\n \"acc_norm\": 0.5924528301886792,\n \"acc_norm_stderr\": 0.030242233800854494\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5972222222222222,\n \"acc_stderr\": 0.04101405519842426,\n \"acc_norm\": 0.5972222222222222,\n \"acc_norm_stderr\": 0.04101405519842426\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 
0.04902071300001975\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5086705202312138,\n \"acc_stderr\": 0.038118909889404105,\n \"acc_norm\": 0.5086705202312138,\n \"acc_norm_stderr\": 0.038118909889404105\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3137254901960784,\n \"acc_stderr\": 0.04617034827006716,\n \"acc_norm\": 0.3137254901960784,\n \"acc_norm_stderr\": 0.04617034827006716\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.047609522856952365,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.047609522856952365\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.46808510638297873,\n \"acc_stderr\": 0.03261936918467381,\n \"acc_norm\": 0.46808510638297873,\n \"acc_norm_stderr\": 0.03261936918467381\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2543859649122807,\n \"acc_stderr\": 0.04096985139843671,\n \"acc_norm\": 0.2543859649122807,\n \"acc_norm_stderr\": 0.04096985139843671\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5310344827586206,\n \"acc_stderr\": 0.04158632762097828,\n \"acc_norm\": 0.5310344827586206,\n \"acc_norm_stderr\": 0.04158632762097828\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3412698412698413,\n \"acc_stderr\": 0.02441923496681907,\n \"acc_norm\": 0.3412698412698413,\n \"acc_norm_stderr\": 0.02441923496681907\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3412698412698413,\n \"acc_stderr\": 0.04240799327574924,\n \"acc_norm\": 0.3412698412698413,\n \"acc_norm_stderr\": 0.04240799327574924\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6741935483870968,\n \"acc_stderr\": 0.026662010578567107,\n \"acc_norm\": 0.6741935483870968,\n \"acc_norm_stderr\": 0.026662010578567107\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.458128078817734,\n \"acc_stderr\": 0.03505630140785741,\n \"acc_norm\": 0.458128078817734,\n \"acc_norm_stderr\": 0.03505630140785741\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7090909090909091,\n \"acc_stderr\": 0.03546563019624336,\n \"acc_norm\": 0.7090909090909091,\n \"acc_norm_stderr\": 0.03546563019624336\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.031911782267135466,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.031911782267135466\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7979274611398963,\n \"acc_stderr\": 0.028979089794296732,\n \"acc_norm\": 0.7979274611398963,\n \"acc_norm_stderr\": 0.028979089794296732\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5538461538461539,\n \"acc_stderr\": 
0.02520357177302833,\n \"acc_norm\": 0.5538461538461539,\n \"acc_norm_stderr\": 0.02520357177302833\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3148148148148148,\n \"acc_stderr\": 0.028317533496066482,\n \"acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.028317533496066482\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6050420168067226,\n \"acc_stderr\": 0.031753678460966266,\n \"acc_norm\": 0.6050420168067226,\n \"acc_norm_stderr\": 0.031753678460966266\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7211009174311926,\n \"acc_stderr\": 0.0192274688764635,\n \"acc_norm\": 0.7211009174311926,\n \"acc_norm_stderr\": 0.0192274688764635\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.03388857118502326,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.03388857118502326\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7990196078431373,\n \"acc_stderr\": 0.028125972265654362,\n \"acc_norm\": 0.7990196078431373,\n \"acc_norm_stderr\": 0.028125972265654362\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7468354430379747,\n \"acc_stderr\": 0.02830465794303529,\n \"acc_norm\": 0.7468354430379747,\n \"acc_norm_stderr\": 0.02830465794303529\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n \"acc_stderr\": 0.03138147637575499,\n \"acc_norm\": 0.6771300448430493,\n \"acc_norm_stderr\": 0.03138147637575499\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.648854961832061,\n \"acc_stderr\": 0.04186445163013751,\n \"acc_norm\": 0.648854961832061,\n \"acc_norm_stderr\": 0.04186445163013751\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.768595041322314,\n \"acc_stderr\": 0.03849856098794089,\n \"acc_norm\": 0.768595041322314,\n \"acc_norm_stderr\": 0.03849856098794089\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7129629629629629,\n \"acc_stderr\": 0.043733130409147614,\n \"acc_norm\": 0.7129629629629629,\n \"acc_norm_stderr\": 0.043733130409147614\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6932515337423313,\n \"acc_stderr\": 0.03623089915724147,\n \"acc_norm\": 0.6932515337423313,\n \"acc_norm_stderr\": 0.03623089915724147\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.30357142857142855,\n \"acc_stderr\": 0.04364226155841044,\n \"acc_norm\": 0.30357142857142855,\n \"acc_norm_stderr\": 0.04364226155841044\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6504854368932039,\n \"acc_stderr\": 0.047211885060971716,\n \"acc_norm\": 0.6504854368932039,\n \"acc_norm_stderr\": 0.047211885060971716\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.811965811965812,\n \"acc_stderr\": 0.02559819368665225,\n \"acc_norm\": 0.811965811965812,\n \"acc_norm_stderr\": 0.02559819368665225\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.735632183908046,\n \"acc_stderr\": 0.015769984840690525,\n \"acc_norm\": 0.735632183908046,\n \"acc_norm_stderr\": 
0.015769984840690525\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6473988439306358,\n \"acc_stderr\": 0.025722802200895803,\n \"acc_norm\": 0.6473988439306358,\n \"acc_norm_stderr\": 0.025722802200895803\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3865921787709497,\n \"acc_stderr\": 0.016286674879101022,\n \"acc_norm\": 0.3865921787709497,\n \"acc_norm_stderr\": 0.016286674879101022\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.630718954248366,\n \"acc_stderr\": 0.027634176689602663,\n \"acc_norm\": 0.630718954248366,\n \"acc_norm_stderr\": 0.027634176689602663\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6784565916398714,\n \"acc_stderr\": 0.026527724079528872,\n \"acc_norm\": 0.6784565916398714,\n \"acc_norm_stderr\": 0.026527724079528872\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6635802469135802,\n \"acc_stderr\": 0.026289734945952922,\n \"acc_norm\": 0.6635802469135802,\n \"acc_norm_stderr\": 0.026289734945952922\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.44680851063829785,\n \"acc_stderr\": 0.029658235097666904,\n \"acc_norm\": 0.44680851063829785,\n \"acc_norm_stderr\": 0.029658235097666904\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4511082138200782,\n \"acc_stderr\": 0.012709037347346233,\n \"acc_norm\": 0.4511082138200782,\n \"acc_norm_stderr\": 0.012709037347346233\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5698529411764706,\n \"acc_stderr\": 0.030074971917302875,\n \"acc_norm\": 0.5698529411764706,\n \"acc_norm_stderr\": 0.030074971917302875\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6029411764705882,\n \"acc_stderr\": 0.019794488900024117,\n \"acc_norm\": 0.6029411764705882,\n \"acc_norm_stderr\": 0.019794488900024117\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6181818181818182,\n \"acc_stderr\": 0.046534298079135075,\n \"acc_norm\": 0.6181818181818182,\n \"acc_norm_stderr\": 0.046534298079135075\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.636734693877551,\n \"acc_stderr\": 0.030789051139030806,\n \"acc_norm\": 0.636734693877551,\n \"acc_norm_stderr\": 0.030789051139030806\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7562189054726368,\n \"acc_stderr\": 0.030360490154014635,\n \"acc_norm\": 0.7562189054726368,\n \"acc_norm_stderr\": 0.030360490154014635\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.03487350880197769,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.03487350880197769\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.41566265060240964,\n \"acc_stderr\": 0.03836722176598052,\n \"acc_norm\": 0.41566265060240964,\n \"acc_norm_stderr\": 0.03836722176598052\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7543859649122807,\n \"acc_stderr\": 0.03301405946987249,\n \"acc_norm\": 0.7543859649122807,\n \"acc_norm_stderr\": 0.03301405946987249\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.379436964504284,\n \"mc1_stderr\": 0.016987039266142985,\n \"mc2\": 0.5444967551355537,\n \"mc2_stderr\": 0.015846880267326138\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7482241515390686,\n \"acc_stderr\": 0.012198489100259785\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.14859742228961334,\n \"acc_stderr\": 0.009797503180527883\n }\n}\n```", "repo_url": "https://huggingface.co/Undi95/PsyMedRP-v1-20B", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|arc:challenge|25_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|gsm8k|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hellaswag|10_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T06-33-57.302712.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T06-33-57.302712.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T06-33-57.302712.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T06-33-57.302712.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T06-33-57.302712.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T06-33-57.302712.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["**/details_harness|winogrande|5_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-16T06-33-57.302712.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_16T06_33_57.302712", "path": ["results_2024-02-16T06-33-57.302712.parquet"]}, {"split": "latest", "path": 
["results_2024-02-16T06-33-57.302712.parquet"]}]}]}
2024-02-16T06:36:38+00:00
64f35b28ffff24cc2eb0773f807368da32873599
This dataset consists of 138 minutes of audio in total and includes approximately 50 songs by yousa (most of them released between 2016 and 2022). The audio has been sliced and filtered, with clip durations ranging from 4 to 15 seconds, resulting in a total of 796 WAV files. The majority of the content is in Chinese, with a small amount in Japanese and very little in English.
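A quick way to sanity-check the slicing described above, once the WAV files have been downloaded and extracted locally, is to measure each clip with Python's standard-library `wave` module. The directory name below is a placeholder assumption, and `wave` only reads standard PCM WAV files, so treat this as a rough sketch rather than part of the dataset itself.

```python
import wave
from pathlib import Path

data_dir = Path("yousa_data_0")  # placeholder: wherever the WAV slices are extracted

durations = []
for wav_path in sorted(data_dir.glob("**/*.wav")):
    with wave.open(str(wav_path), "rb") as wf:
        # duration in seconds = number of frames / sample rate
        durations.append(wf.getnframes() / wf.getframerate())

# Expectation from the description: ~796 clips, each 4-15 s, ~138 min total.
print(f"{len(durations)} clips, {sum(durations) / 60:.1f} min total")
print(f"shortest {min(durations):.1f} s, longest {max(durations):.1f} s")
```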
yousaforever/yousa_data_0
[ "license:gpl-3.0", "region:us" ]
2024-02-16T06:49:58+00:00
{"license": "gpl-3.0"}
2024-02-16T06:59:56+00:00
cbad2bf81120a84400915e19ea76eb3fb437132a
bertram-gilfoyle/CC-MAIN-2023-40
[ "region:us" ]
2024-02-16T06:50:46+00:00
{}
2024-02-16T11:30:22+00:00
de13ee43091b66636c7be0bf0152a158fbe2a486
marcones/marcoselementar
[ "license:openrail", "region:us" ]
2024-02-16T06:58:39+00:00
{"license": "openrail"}
2024-02-16T06:58:52+00:00
5cc645f5f3e9116d75dfcc66d1ff3f4a497df607
Approximately 9 minutes of normal speech, divided into 70 slices, which can be used to train a TTS model.
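TTS preprocessing pipelines typically start from a manifest of clip paths and durations; the sketch below shows one possible way to build such a file from these 70 slices. The folder name and output format are illustrative assumptions, and transcripts (which a TTS recipe would also need) are not covered here.

```python
import csv
import wave
from pathlib import Path

data_dir = Path("yousa_data_1")  # placeholder: local folder holding the 70 speech slices

# Write a minimal manifest (filename, duration in seconds) that a TTS
# preprocessing pipeline could take as a starting point.
with open("manifest.csv", "w", newline="") as f:
    writer = csv.writer(f)
    writer.writerow(["filename", "duration_s"])
    for wav_path in sorted(data_dir.glob("*.wav")):
        with wave.open(str(wav_path), "rb") as wf:
            duration = wf.getnframes() / wf.getframerate()
            writer.writerow([wav_path.name, round(duration, 2)])
```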
yousaforever/yousa_data_1
[ "license:gpl-3.0", "region:us" ]
2024-02-16T07:03:45+00:00
{"license": "gpl-3.0"}
2024-02-16T07:10:56+00:00
bec85a7b45ef8b3ed51710621404935762eb5ed6
Ketan3101/bot_train
[ "license:mit", "region:us" ]
2024-02-16T07:07:51+00:00
{"license": "mit"}
2024-02-16T09:53:44+00:00
192462fe2072e3a1d2c1e32c02170978328bddf1
# Dataset Card for Evaluation run of jisukim8873/falcon-7B-case-6 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [jisukim8873/falcon-7B-case-6](https://huggingface.co/jisukim8873/falcon-7B-case-6) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jisukim8873__falcon-7B-case-6", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-16T07:12:28.485530](https://huggingface.co/datasets/open-llm-leaderboard/details_jisukim8873__falcon-7B-case-6/blob/main/results_2024-02-16T07-12-28.485530.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2999741752010719, "acc_stderr": 0.032195034392452436, "acc_norm": 0.30103224915319854, "acc_norm_stderr": 0.032944763241990214, "mc1": 0.25091799265605874, "mc1_stderr": 0.015176985027707687, "mc2": 0.364571668218642, "mc2_stderr": 0.014117416041879967 }, "harness|arc:challenge|25": { "acc": 0.4274744027303754, "acc_stderr": 0.014456862944650654, "acc_norm": 0.46501706484641636, "acc_norm_stderr": 0.014575583922019665 }, "harness|hellaswag|10": { "acc": 0.5976897032463653, "acc_stderr": 0.0048936170149753, "acc_norm": 0.7849034056960765, "acc_norm_stderr": 0.004100495978108428 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2962962962962963, "acc_stderr": 0.03944624162501116, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.03944624162501116 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.3026315789473684, "acc_stderr": 0.037385206761196686, "acc_norm": 0.3026315789473684, "acc_norm_stderr": 0.037385206761196686 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.23, "acc_stderr": 0.042295258468165065, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.3018867924528302, "acc_stderr": 0.028254200344438662, "acc_norm": 0.3018867924528302, "acc_norm_stderr": 0.028254200344438662 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2638888888888889, "acc_stderr": 0.03685651095897532, "acc_norm": 0.2638888888888889, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.18, "acc_stderr": 0.038612291966536955, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536955 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2658959537572254, "acc_stderr": 0.03368762932259431, "acc_norm": 0.2658959537572254, "acc_norm_stderr": 0.03368762932259431 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.040925639582376536, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.040925639582376536 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3148936170212766, "acc_stderr": 0.03036358219723817, "acc_norm": 0.3148936170212766, "acc_norm_stderr": 0.03036358219723817 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2894736842105263, "acc_stderr": 0.04266339443159394, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.04266339443159394 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.27586206896551724, "acc_stderr": 0.037245636197746325, "acc_norm": 0.27586206896551724, "acc_norm_stderr": 0.037245636197746325 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.02256989707491841, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.02256989707491841 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.1349206349206349, "acc_stderr": 0.030557101589417515, "acc_norm": 0.1349206349206349, "acc_norm_stderr": 0.030557101589417515 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.33225806451612905, "acc_stderr": 0.02679556084812279, "acc_norm": 0.33225806451612905, "acc_norm_stderr": 0.02679556084812279 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3497536945812808, "acc_stderr": 0.03355400904969566, "acc_norm": 0.3497536945812808, "acc_norm_stderr": 0.03355400904969566 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.3151515151515151, "acc_stderr": 0.0362773057502241, "acc_norm": 0.3151515151515151, "acc_norm_stderr": 0.0362773057502241 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.30303030303030304, "acc_stderr": 0.03274287914026869, "acc_norm": 0.30303030303030304, "acc_norm_stderr": 0.03274287914026869 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.25906735751295334, "acc_stderr": 0.03161877917935411, "acc_norm": 0.25906735751295334, "acc_norm_stderr": 0.03161877917935411 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.24615384615384617, "acc_stderr": 0.021840866990423095, "acc_norm": 0.24615384615384617, "acc_norm_stderr": 0.021840866990423095 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.24444444444444444, "acc_stderr": 0.026202766534652155, "acc_norm": 0.24444444444444444, "acc_norm_stderr": 0.026202766534652155 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.24369747899159663, "acc_stderr": 0.027886828078380572, "acc_norm": 0.24369747899159663, "acc_norm_stderr": 0.027886828078380572 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2781456953642384, "acc_stderr": 
0.03658603262763743, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763743 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.28990825688073396, "acc_stderr": 0.019453066609201597, "acc_norm": 0.28990825688073396, "acc_norm_stderr": 0.019453066609201597 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.19444444444444445, "acc_stderr": 0.026991454502036744, "acc_norm": 0.19444444444444445, "acc_norm_stderr": 0.026991454502036744 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.27450980392156865, "acc_stderr": 0.03132179803083289, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.03132179803083289 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.31645569620253167, "acc_stderr": 0.03027497488021897, "acc_norm": 0.31645569620253167, "acc_norm_stderr": 0.03027497488021897 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.37668161434977576, "acc_stderr": 0.03252113489929188, "acc_norm": 0.37668161434977576, "acc_norm_stderr": 0.03252113489929188 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.26717557251908397, "acc_stderr": 0.03880848301082396, "acc_norm": 0.26717557251908397, "acc_norm_stderr": 0.03880848301082396 }, "harness|hendrycksTest-international_law|5": { "acc": 0.4132231404958678, "acc_stderr": 0.04495087843548408, "acc_norm": 0.4132231404958678, "acc_norm_stderr": 0.04495087843548408 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.3148148148148148, "acc_stderr": 0.04489931073591312, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.04489931073591312 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.2883435582822086, "acc_stderr": 0.035590395316173425, "acc_norm": 0.2883435582822086, "acc_norm_stderr": 0.035590395316173425 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.042878587513404565, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.042878587513404565 }, "harness|hendrycksTest-management|5": { "acc": 0.32038834951456313, "acc_stderr": 0.04620284082280039, "acc_norm": 0.32038834951456313, "acc_norm_stderr": 0.04620284082280039 }, "harness|hendrycksTest-marketing|5": { "acc": 0.3076923076923077, "acc_stderr": 0.03023638994217307, "acc_norm": 0.3076923076923077, "acc_norm_stderr": 0.03023638994217307 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.3537675606641124, "acc_stderr": 0.017098184708161903, "acc_norm": 0.3537675606641124, "acc_norm_stderr": 0.017098184708161903 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.3236994219653179, "acc_stderr": 0.025190181327608422, "acc_norm": 0.3236994219653179, "acc_norm_stderr": 0.025190181327608422 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24692737430167597, "acc_stderr": 0.014422292204808835, "acc_norm": 0.24692737430167597, "acc_norm_stderr": 0.014422292204808835 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.3202614379084967, "acc_stderr": 0.026716118380156844, "acc_norm": 0.3202614379084967, "acc_norm_stderr": 0.026716118380156844 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.3183279742765273, "acc_stderr": 0.026457225067811025, "acc_norm": 0.3183279742765273, "acc_norm_stderr": 0.026457225067811025 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2777777777777778, "acc_stderr": 0.024922001168886335, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.024922001168886335 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.24113475177304963, "acc_stderr": 0.02551873104953776, "acc_norm": 0.24113475177304963, "acc_norm_stderr": 0.02551873104953776 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2627118644067797, "acc_stderr": 0.01124054551499567, "acc_norm": 0.2627118644067797, "acc_norm_stderr": 0.01124054551499567 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.21323529411764705, "acc_stderr": 0.024880971512294292, "acc_norm": 0.21323529411764705, "acc_norm_stderr": 0.024880971512294292 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.2630718954248366, "acc_stderr": 0.017812676542320657, "acc_norm": 0.2630718954248366, "acc_norm_stderr": 0.017812676542320657 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.24545454545454545, "acc_stderr": 0.04122066502878284, "acc_norm": 0.24545454545454545, "acc_norm_stderr": 0.04122066502878284 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.24489795918367346, "acc_stderr": 0.02752963744017493, "acc_norm": 0.24489795918367346, "acc_norm_stderr": 0.02752963744017493 }, "harness|hendrycksTest-sociology|5": { "acc": 0.3034825870646766, "acc_stderr": 0.032510068164586174, "acc_norm": 0.3034825870646766, "acc_norm_stderr": 0.032510068164586174 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-virology|5": { "acc": 0.3253012048192771, "acc_stderr": 0.03647168523683227, "acc_norm": 0.3253012048192771, "acc_norm_stderr": 0.03647168523683227 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3391812865497076, "acc_stderr": 0.03631053496488905, "acc_norm": 0.3391812865497076, "acc_norm_stderr": 0.03631053496488905 }, "harness|truthfulqa:mc|0": { "mc1": 0.25091799265605874, "mc1_stderr": 0.015176985027707687, "mc2": 0.364571668218642, "mc2_stderr": 0.014117416041879967 }, "harness|winogrande|5": { "acc": 0.7008681925808997, "acc_stderr": 0.012868639066091541 }, "harness|gsm8k|5": { "acc": 0.06141015921152388, "acc_stderr": 0.006613027536586305 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
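As a small extension of the loading example earlier in this card, the aggregated scores can in principle be pulled from the same repository, since the config list in this card's metadata also defines a "results" configuration with a "latest" split. The snippet below is an illustrative, unverified sketch rather than an official recipe; the exact fields returned depend on what the run actually stored.

```python
from datasets import load_dataset

# Aggregated metrics for the run; per the repository's config list,
# the "latest" split always points at the most recent results file.
results = load_dataset(
    "open-llm-leaderboard/details_jisukim8873__falcon-7B-case-6",
    "results",
    split="latest",
)

print(results.column_names)  # which aggregated fields were stored
print(results[0])            # first row of the aggregated results
```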
open-llm-leaderboard/details_jisukim8873__falcon-7B-case-6
[ "region:us" ]
2024-02-16T07:14:11+00:00
{"pretty_name": "Evaluation run of jisukim8873/falcon-7B-case-6", "dataset_summary": "Dataset automatically created during the evaluation run of model [jisukim8873/falcon-7B-case-6](https://huggingface.co/jisukim8873/falcon-7B-case-6) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jisukim8873__falcon-7B-case-6\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-16T07:12:28.485530](https://huggingface.co/datasets/open-llm-leaderboard/details_jisukim8873__falcon-7B-case-6/blob/main/results_2024-02-16T07-12-28.485530.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2999741752010719,\n \"acc_stderr\": 0.032195034392452436,\n \"acc_norm\": 0.30103224915319854,\n \"acc_norm_stderr\": 0.032944763241990214,\n \"mc1\": 0.25091799265605874,\n \"mc1_stderr\": 0.015176985027707687,\n \"mc2\": 0.364571668218642,\n \"mc2_stderr\": 0.014117416041879967\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.4274744027303754,\n \"acc_stderr\": 0.014456862944650654,\n \"acc_norm\": 0.46501706484641636,\n \"acc_norm_stderr\": 0.014575583922019665\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5976897032463653,\n \"acc_stderr\": 0.0048936170149753,\n \"acc_norm\": 0.7849034056960765,\n \"acc_norm_stderr\": 0.004100495978108428\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2962962962962963,\n \"acc_stderr\": 0.03944624162501116,\n \"acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.03944624162501116\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.3026315789473684,\n \"acc_stderr\": 0.037385206761196686,\n \"acc_norm\": 0.3026315789473684,\n \"acc_norm_stderr\": 0.037385206761196686\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.3018867924528302,\n \"acc_stderr\": 0.028254200344438662,\n \"acc_norm\": 0.3018867924528302,\n \"acc_norm_stderr\": 0.028254200344438662\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2638888888888889,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.2638888888888889,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536955,\n 
\"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536955\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2658959537572254,\n \"acc_stderr\": 0.03368762932259431,\n \"acc_norm\": 0.2658959537572254,\n \"acc_norm_stderr\": 0.03368762932259431\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.040925639582376536,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.040925639582376536\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.3148936170212766,\n \"acc_stderr\": 0.03036358219723817,\n \"acc_norm\": 0.3148936170212766,\n \"acc_norm_stderr\": 0.03036358219723817\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2894736842105263,\n \"acc_stderr\": 0.04266339443159394,\n \"acc_norm\": 0.2894736842105263,\n \"acc_norm_stderr\": 0.04266339443159394\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.27586206896551724,\n \"acc_stderr\": 0.037245636197746325,\n \"acc_norm\": 0.27586206896551724,\n \"acc_norm_stderr\": 0.037245636197746325\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.02256989707491841,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.02256989707491841\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.1349206349206349,\n \"acc_stderr\": 0.030557101589417515,\n \"acc_norm\": 0.1349206349206349,\n \"acc_norm_stderr\": 0.030557101589417515\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.33225806451612905,\n \"acc_stderr\": 0.02679556084812279,\n \"acc_norm\": 0.33225806451612905,\n \"acc_norm_stderr\": 0.02679556084812279\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.3497536945812808,\n \"acc_stderr\": 0.03355400904969566,\n \"acc_norm\": 0.3497536945812808,\n \"acc_norm_stderr\": 0.03355400904969566\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.3151515151515151,\n \"acc_stderr\": 0.0362773057502241,\n \"acc_norm\": 0.3151515151515151,\n \"acc_norm_stderr\": 0.0362773057502241\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.30303030303030304,\n \"acc_stderr\": 0.03274287914026869,\n \"acc_norm\": 0.30303030303030304,\n \"acc_norm_stderr\": 0.03274287914026869\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.25906735751295334,\n \"acc_stderr\": 0.03161877917935411,\n \"acc_norm\": 0.25906735751295334,\n \"acc_norm_stderr\": 0.03161877917935411\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.24615384615384617,\n \"acc_stderr\": 0.021840866990423095,\n \"acc_norm\": 0.24615384615384617,\n \"acc_norm_stderr\": 0.021840866990423095\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.24444444444444444,\n \"acc_stderr\": 0.026202766534652155,\n \"acc_norm\": 0.24444444444444444,\n \"acc_norm_stderr\": 0.026202766534652155\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.24369747899159663,\n \"acc_stderr\": 0.027886828078380572,\n \"acc_norm\": 0.24369747899159663,\n \"acc_norm_stderr\": 0.027886828078380572\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2781456953642384,\n \"acc_stderr\": 0.03658603262763743,\n \"acc_norm\": 0.2781456953642384,\n \"acc_norm_stderr\": 0.03658603262763743\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.28990825688073396,\n \"acc_stderr\": 0.019453066609201597,\n \"acc_norm\": 0.28990825688073396,\n \"acc_norm_stderr\": 0.019453066609201597\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.19444444444444445,\n \"acc_stderr\": 0.026991454502036744,\n \"acc_norm\": 0.19444444444444445,\n \"acc_norm_stderr\": 0.026991454502036744\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.27450980392156865,\n \"acc_stderr\": 0.03132179803083289,\n \"acc_norm\": 0.27450980392156865,\n \"acc_norm_stderr\": 0.03132179803083289\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.31645569620253167,\n \"acc_stderr\": 0.03027497488021897,\n \"acc_norm\": 0.31645569620253167,\n \"acc_norm_stderr\": 0.03027497488021897\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.37668161434977576,\n \"acc_stderr\": 0.03252113489929188,\n \"acc_norm\": 0.37668161434977576,\n \"acc_norm_stderr\": 0.03252113489929188\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.26717557251908397,\n \"acc_stderr\": 0.03880848301082396,\n \"acc_norm\": 0.26717557251908397,\n \"acc_norm_stderr\": 0.03880848301082396\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.4132231404958678,\n \"acc_stderr\": 0.04495087843548408,\n \"acc_norm\": 0.4132231404958678,\n \"acc_norm_stderr\": 0.04495087843548408\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.3148148148148148,\n \"acc_stderr\": 0.04489931073591312,\n \"acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.04489931073591312\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.2883435582822086,\n \"acc_stderr\": 0.035590395316173425,\n \"acc_norm\": 0.2883435582822086,\n \"acc_norm_stderr\": 0.035590395316173425\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.042878587513404565,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.042878587513404565\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.32038834951456313,\n \"acc_stderr\": 0.04620284082280039,\n \"acc_norm\": 0.32038834951456313,\n \"acc_norm_stderr\": 0.04620284082280039\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.3076923076923077,\n \"acc_stderr\": 0.03023638994217307,\n \"acc_norm\": 0.3076923076923077,\n \"acc_norm_stderr\": 0.03023638994217307\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.3537675606641124,\n \"acc_stderr\": 
0.017098184708161903,\n \"acc_norm\": 0.3537675606641124,\n \"acc_norm_stderr\": 0.017098184708161903\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.3236994219653179,\n \"acc_stderr\": 0.025190181327608422,\n \"acc_norm\": 0.3236994219653179,\n \"acc_norm_stderr\": 0.025190181327608422\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24692737430167597,\n \"acc_stderr\": 0.014422292204808835,\n \"acc_norm\": 0.24692737430167597,\n \"acc_norm_stderr\": 0.014422292204808835\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.3202614379084967,\n \"acc_stderr\": 0.026716118380156844,\n \"acc_norm\": 0.3202614379084967,\n \"acc_norm_stderr\": 0.026716118380156844\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.3183279742765273,\n \"acc_stderr\": 0.026457225067811025,\n \"acc_norm\": 0.3183279742765273,\n \"acc_norm_stderr\": 0.026457225067811025\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.024922001168886335,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.024922001168886335\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.24113475177304963,\n \"acc_stderr\": 0.02551873104953776,\n \"acc_norm\": 0.24113475177304963,\n \"acc_norm_stderr\": 0.02551873104953776\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2627118644067797,\n \"acc_stderr\": 0.01124054551499567,\n \"acc_norm\": 0.2627118644067797,\n \"acc_norm_stderr\": 0.01124054551499567\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.21323529411764705,\n \"acc_stderr\": 0.024880971512294292,\n \"acc_norm\": 0.21323529411764705,\n \"acc_norm_stderr\": 0.024880971512294292\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.2630718954248366,\n \"acc_stderr\": 0.017812676542320657,\n \"acc_norm\": 0.2630718954248366,\n \"acc_norm_stderr\": 0.017812676542320657\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.24545454545454545,\n \"acc_stderr\": 0.04122066502878284,\n \"acc_norm\": 0.24545454545454545,\n \"acc_norm_stderr\": 0.04122066502878284\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.24489795918367346,\n \"acc_stderr\": 0.02752963744017493,\n \"acc_norm\": 0.24489795918367346,\n \"acc_norm_stderr\": 0.02752963744017493\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.3034825870646766,\n \"acc_stderr\": 0.032510068164586174,\n \"acc_norm\": 0.3034825870646766,\n \"acc_norm_stderr\": 0.032510068164586174\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3253012048192771,\n \"acc_stderr\": 0.03647168523683227,\n \"acc_norm\": 0.3253012048192771,\n \"acc_norm_stderr\": 0.03647168523683227\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3391812865497076,\n \"acc_stderr\": 0.03631053496488905,\n \"acc_norm\": 0.3391812865497076,\n \"acc_norm_stderr\": 0.03631053496488905\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.25091799265605874,\n \"mc1_stderr\": 0.015176985027707687,\n \"mc2\": 0.364571668218642,\n \"mc2_stderr\": 0.014117416041879967\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7008681925808997,\n \"acc_stderr\": 0.012868639066091541\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.06141015921152388,\n \"acc_stderr\": 0.006613027536586305\n }\n}\n```", "repo_url": 
"https://huggingface.co/jisukim8873/falcon-7B-case-6", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|arc:challenge|25_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|gsm8k|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hellaswag|10_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T07-12-28.485530.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T07-12-28.485530.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T07-12-28.485530.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T07-12-28.485530.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T07-12-28.485530.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T07-12-28.485530.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["**/details_harness|winogrande|5_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-16T07-12-28.485530.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_16T07_12_28.485530", "path": ["results_2024-02-16T07-12-28.485530.parquet"]}, {"split": "latest", "path": 
["results_2024-02-16T07-12-28.485530.parquet"]}]}]}
2024-02-16T07:14:33+00:00
17b17b1a5a333d153426db496a222394f81ff515
shrms/structured-data
[ "region:us" ]
2024-02-16T07:16:45+00:00
{}
2024-02-16T07:16:45+00:00
bb6f259fcae3ac2babbd461480306097b56e0fd4
doanhm/images_input
[ "region:us" ]
2024-02-16T07:22:50+00:00
{}
2024-02-16T07:33:03+00:00
3e28d7e97cd337e7204aaa32ede07ee661e32daa
KimByeongSu/full
[ "region:us" ]
2024-02-16T07:23:47+00:00
{}
2024-02-16T07:24:04+00:00
792f28be3d1964ddbf74fc09ada1bce675a765bf
andersonbcdefg/misc_qa_pairs_v2_deduped_cf
[ "region:us" ]
2024-02-16T07:28:50+00:00
{"dataset_info": {"features": [{"name": "query", "dtype": "string"}, {"name": "pos", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 745217529.7175856, "num_examples": 1336274}], "download_size": 515527807, "dataset_size": 745217529.7175856}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-16T07:35:10+00:00
2a62a9ea17e2bcf749ee557d849856f73fb585ba
ll00292007/my-dataset-lora
[ "region:us" ]
2024-02-16T07:32:15+00:00
{}
2024-02-16T07:32:15+00:00
4435181c6da6a5236a0ad5a0c9df3f102c797dbe
HEMASENTHIL/NEWDEMO
[ "region:us" ]
2024-02-16T07:35:46+00:00
{"dataset_info": {"features": [{"name": "english", "dtype": "string"}, {"name": "tamil", "dtype": "string"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 6350, "num_examples": 9}], "download_size": 9674, "dataset_size": 6350}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-16T07:38:51+00:00
28c35a4152ed69188fc9eb344579fc577016c925
# Dataset Card for Evaluation run of Kquant03/Buttercup-V2-laser <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Kquant03/Buttercup-V2-laser](https://huggingface.co/Kquant03/Buttercup-V2-laser) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Kquant03__Buttercup-V2-laser", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-16T07:34:11.973720](https://huggingface.co/datasets/open-llm-leaderboard/details_Kquant03__Buttercup-V2-laser/blob/main/results_2024-02-16T07-34-11.973720.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6535761549256881, "acc_stderr": 0.03205604876868876, "acc_norm": 0.6528640185317818, "acc_norm_stderr": 0.032733047429496384, "mc1": 0.5520195838433293, "mc1_stderr": 0.017408513063422917, "mc2": 0.6899750707536572, "mc2_stderr": 0.01507018824423322 }, "harness|arc:challenge|25": { "acc": 0.7081911262798635, "acc_stderr": 0.013284525292403511, "acc_norm": 0.7312286689419796, "acc_norm_stderr": 0.012955065963710698 }, "harness|hellaswag|10": { "acc": 0.7135032861979685, "acc_stderr": 0.004512002459757956, "acc_norm": 0.8847839075881299, "acc_norm_stderr": 0.0031863002304505753 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6592592592592592, "acc_stderr": 0.040943762699967926, "acc_norm": 0.6592592592592592, "acc_norm_stderr": 0.040943762699967926 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6973684210526315, "acc_stderr": 0.03738520676119669, "acc_norm": 0.6973684210526315, "acc_norm_stderr": 0.03738520676119669 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7018867924528301, "acc_stderr": 0.02815283794249387, "acc_norm": 0.7018867924528301, "acc_norm_stderr": 0.02815283794249387 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03476590104304134, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.048971049527263666, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.048971049527263666 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.04292346959909283, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5702127659574469, "acc_stderr": 0.03236214467715564, "acc_norm": 0.5702127659574469, "acc_norm_stderr": 0.03236214467715564 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5175438596491229, "acc_stderr": 0.04700708033551038, "acc_norm": 0.5175438596491229, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370333, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370333 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.43386243386243384, "acc_stderr": 0.025525034382474887, "acc_norm": 0.43386243386243384, "acc_norm_stderr": 0.025525034382474887 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.48412698412698413, "acc_stderr": 0.04469881854072606, "acc_norm": 0.48412698412698413, "acc_norm_stderr": 0.04469881854072606 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7838709677419354, "acc_stderr": 0.02341529343356852, "acc_norm": 0.7838709677419354, "acc_norm_stderr": 0.02341529343356852 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5073891625615764, "acc_stderr": 0.035176035403610105, "acc_norm": 0.5073891625615764, "acc_norm_stderr": 0.035176035403610105 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7878787878787878, "acc_stderr": 0.03192271569548301, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.03192271569548301 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8232323232323232, "acc_stderr": 0.027178752639044915, "acc_norm": 0.8232323232323232, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.021995311963644237, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.021995311963644237 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.658974358974359, "acc_stderr": 0.02403548967633508, "acc_norm": 0.658974358974359, "acc_norm_stderr": 0.02403548967633508 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34814814814814815, "acc_stderr": 0.029045600290616255, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.029045600290616255 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6890756302521008, "acc_stderr": 0.030066761582977938, "acc_norm": 0.6890756302521008, "acc_norm_stderr": 0.030066761582977938 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.39072847682119205, "acc_stderr": 
0.03983798306659806, "acc_norm": 0.39072847682119205, "acc_norm_stderr": 0.03983798306659806 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8385321100917431, "acc_stderr": 0.015776239256163255, "acc_norm": 0.8385321100917431, "acc_norm_stderr": 0.015776239256163255 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.49074074074074076, "acc_stderr": 0.034093869469927006, "acc_norm": 0.49074074074074076, "acc_norm_stderr": 0.034093869469927006 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8333333333333334, "acc_stderr": 0.026156867523931045, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.026156867523931045 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8016877637130801, "acc_stderr": 0.02595502084162113, "acc_norm": 0.8016877637130801, "acc_norm_stderr": 0.02595502084162113 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.031024411740572213, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.031024411740572213 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990946, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990946 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.75, "acc_stderr": 0.04186091791394607, "acc_norm": 0.75, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.033519538795212696, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.033519538795212696 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.44642857142857145, "acc_stderr": 0.04718471485219588, "acc_norm": 0.44642857142857145, "acc_norm_stderr": 0.04718471485219588 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8931623931623932, "acc_stderr": 0.02023714900899093, "acc_norm": 0.8931623931623932, "acc_norm_stderr": 0.02023714900899093 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8237547892720306, "acc_stderr": 0.013625556907993466, "acc_norm": 0.8237547892720306, "acc_norm_stderr": 0.013625556907993466 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7398843930635838, "acc_stderr": 0.023618678310069356, "acc_norm": 0.7398843930635838, "acc_norm_stderr": 0.023618678310069356 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4301675977653631, "acc_stderr": 0.01655860163604103, "acc_norm": 0.4301675977653631, "acc_norm_stderr": 0.01655860163604103 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7156862745098039, "acc_stderr": 0.025829163272757482, "acc_norm": 0.7156862745098039, "acc_norm_stderr": 0.025829163272757482 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.707395498392283, "acc_stderr": 0.02583989833487798, "acc_norm": 0.707395498392283, "acc_norm_stderr": 0.02583989833487798 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7561728395061729, "acc_stderr": 0.023891879541959614, "acc_norm": 0.7561728395061729, "acc_norm_stderr": 0.023891879541959614 }, "harness|hendrycksTest-professional_accounting|5": { 
"acc": 0.5, "acc_stderr": 0.029827499313594685, "acc_norm": 0.5, "acc_norm_stderr": 0.029827499313594685 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4680573663624511, "acc_stderr": 0.012744149704869647, "acc_norm": 0.4680573663624511, "acc_norm_stderr": 0.012744149704869647 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6691176470588235, "acc_stderr": 0.02858270975389845, "acc_norm": 0.6691176470588235, "acc_norm_stderr": 0.02858270975389845 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6617647058823529, "acc_stderr": 0.01913994374848704, "acc_norm": 0.6617647058823529, "acc_norm_stderr": 0.01913994374848704 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.044612721759105085, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.044612721759105085 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7224489795918367, "acc_stderr": 0.02866685779027465, "acc_norm": 0.7224489795918367, "acc_norm_stderr": 0.02866685779027465 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8258706467661692, "acc_stderr": 0.026814951200421603, "acc_norm": 0.8258706467661692, "acc_norm_stderr": 0.026814951200421603 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.5520195838433293, "mc1_stderr": 0.017408513063422917, "mc2": 0.6899750707536572, "mc2_stderr": 0.01507018824423322 }, "harness|winogrande|5": { "acc": 0.8626677190213102, "acc_stderr": 0.009673669315476049 }, "harness|gsm8k|5": { "acc": 0.6808188021228203, "acc_stderr": 0.012840345676251653 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
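As a brief usage sketch to complement the loading example earlier in this card (this is an added illustration, not part of the auto-generated card): the aggregated metrics can be pulled with the same `datasets.load_dataset` pattern, assuming the `results` configuration and its `latest` split that appear in this repo's declared configurations.

```python
from datasets import load_dataset

# Assumed config/split names ("results", "latest"), taken from the repo's
# declared configurations; adjust them if the repo layout differs.
aggregated = load_dataset(
    "open-llm-leaderboard/details_Kquant03__Buttercup-V2-laser",
    "results",
    split="latest",
)

# Each row holds the aggregated metrics for one evaluation run.
print(aggregated[0])
```

Per-task details can be loaded the same way by swapping in one of the `harness_*` configuration names listed for this dataset.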
open-llm-leaderboard/details_Kquant03__Buttercup-V2-laser
[ "region:us" ]
2024-02-16T07:36:29+00:00
{"pretty_name": "Evaluation run of Kquant03/Buttercup-V2-laser", "dataset_summary": "Dataset automatically created during the evaluation run of model [Kquant03/Buttercup-V2-laser](https://huggingface.co/Kquant03/Buttercup-V2-laser) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Kquant03__Buttercup-V2-laser\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-16T07:34:11.973720](https://huggingface.co/datasets/open-llm-leaderboard/details_Kquant03__Buttercup-V2-laser/blob/main/results_2024-02-16T07-34-11.973720.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6535761549256881,\n \"acc_stderr\": 0.03205604876868876,\n \"acc_norm\": 0.6528640185317818,\n \"acc_norm_stderr\": 0.032733047429496384,\n \"mc1\": 0.5520195838433293,\n \"mc1_stderr\": 0.017408513063422917,\n \"mc2\": 0.6899750707536572,\n \"mc2_stderr\": 0.01507018824423322\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7081911262798635,\n \"acc_stderr\": 0.013284525292403511,\n \"acc_norm\": 0.7312286689419796,\n \"acc_norm_stderr\": 0.012955065963710698\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7135032861979685,\n \"acc_stderr\": 0.004512002459757956,\n \"acc_norm\": 0.8847839075881299,\n \"acc_norm_stderr\": 0.0031863002304505753\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6592592592592592,\n \"acc_stderr\": 0.040943762699967926,\n \"acc_norm\": 0.6592592592592592,\n \"acc_norm_stderr\": 0.040943762699967926\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.03738520676119669,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.03738520676119669\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7018867924528301,\n \"acc_stderr\": 0.02815283794249387,\n \"acc_norm\": 0.7018867924528301,\n \"acc_norm_stderr\": 0.02815283794249387\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 
0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5702127659574469,\n \"acc_stderr\": 0.03236214467715564,\n \"acc_norm\": 0.5702127659574469,\n \"acc_norm_stderr\": 0.03236214467715564\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5175438596491229,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.5175438596491229,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370333,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370333\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.43386243386243384,\n \"acc_stderr\": 0.025525034382474887,\n \"acc_norm\": 0.43386243386243384,\n \"acc_norm_stderr\": 0.025525034382474887\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.48412698412698413,\n \"acc_stderr\": 0.04469881854072606,\n \"acc_norm\": 0.48412698412698413,\n \"acc_norm_stderr\": 0.04469881854072606\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7838709677419354,\n \"acc_stderr\": 0.02341529343356852,\n \"acc_norm\": 0.7838709677419354,\n \"acc_norm_stderr\": 0.02341529343356852\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.035176035403610105,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.035176035403610105\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.03192271569548301,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.03192271569548301\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8232323232323232,\n \"acc_stderr\": 0.027178752639044915,\n \"acc_norm\": 0.8232323232323232,\n \"acc_norm_stderr\": 0.027178752639044915\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.021995311963644237,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.021995311963644237\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.658974358974359,\n \"acc_stderr\": 0.02403548967633508,\n \"acc_norm\": 0.658974358974359,\n \"acc_norm_stderr\": 0.02403548967633508\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34814814814814815,\n \"acc_stderr\": 0.029045600290616255,\n \"acc_norm\": 0.34814814814814815,\n \"acc_norm_stderr\": 0.029045600290616255\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6890756302521008,\n \"acc_stderr\": 0.030066761582977938,\n \"acc_norm\": 0.6890756302521008,\n \"acc_norm_stderr\": 0.030066761582977938\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.39072847682119205,\n \"acc_stderr\": 0.03983798306659806,\n \"acc_norm\": 0.39072847682119205,\n \"acc_norm_stderr\": 0.03983798306659806\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8385321100917431,\n \"acc_stderr\": 0.015776239256163255,\n \"acc_norm\": 0.8385321100917431,\n \"acc_norm_stderr\": 0.015776239256163255\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.49074074074074076,\n \"acc_stderr\": 0.034093869469927006,\n \"acc_norm\": 0.49074074074074076,\n \"acc_norm_stderr\": 0.034093869469927006\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.026156867523931045,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.026156867523931045\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8016877637130801,\n \"acc_stderr\": 0.02595502084162113,\n \"acc_norm\": 0.8016877637130801,\n \"acc_norm_stderr\": 0.02595502084162113\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.031024411740572213,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.031024411740572213\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990946,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990946\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.033519538795212696,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.033519538795212696\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n \"acc_stderr\": 0.04718471485219588,\n \"acc_norm\": 0.44642857142857145,\n \"acc_norm_stderr\": 0.04718471485219588\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8931623931623932,\n \"acc_stderr\": 0.02023714900899093,\n \"acc_norm\": 0.8931623931623932,\n \"acc_norm_stderr\": 0.02023714900899093\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8237547892720306,\n \"acc_stderr\": 0.013625556907993466,\n \"acc_norm\": 0.8237547892720306,\n 
\"acc_norm_stderr\": 0.013625556907993466\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.023618678310069356,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.023618678310069356\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4301675977653631,\n \"acc_stderr\": 0.01655860163604103,\n \"acc_norm\": 0.4301675977653631,\n \"acc_norm_stderr\": 0.01655860163604103\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7156862745098039,\n \"acc_stderr\": 0.025829163272757482,\n \"acc_norm\": 0.7156862745098039,\n \"acc_norm_stderr\": 0.025829163272757482\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.707395498392283,\n \"acc_stderr\": 0.02583989833487798,\n \"acc_norm\": 0.707395498392283,\n \"acc_norm_stderr\": 0.02583989833487798\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7561728395061729,\n \"acc_stderr\": 0.023891879541959614,\n \"acc_norm\": 0.7561728395061729,\n \"acc_norm_stderr\": 0.023891879541959614\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.029827499313594685,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.029827499313594685\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4680573663624511,\n \"acc_stderr\": 0.012744149704869647,\n \"acc_norm\": 0.4680573663624511,\n \"acc_norm_stderr\": 0.012744149704869647\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6691176470588235,\n \"acc_stderr\": 0.02858270975389845,\n \"acc_norm\": 0.6691176470588235,\n \"acc_norm_stderr\": 0.02858270975389845\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6617647058823529,\n \"acc_stderr\": 0.01913994374848704,\n \"acc_norm\": 0.6617647058823529,\n \"acc_norm_stderr\": 0.01913994374848704\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.044612721759105085,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.044612721759105085\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.02866685779027465,\n \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.02866685779027465\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8258706467661692,\n \"acc_stderr\": 0.026814951200421603,\n \"acc_norm\": 0.8258706467661692,\n \"acc_norm_stderr\": 0.026814951200421603\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5520195838433293,\n \"mc1_stderr\": 0.017408513063422917,\n \"mc2\": 0.6899750707536572,\n \"mc2_stderr\": 0.01507018824423322\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8626677190213102,\n \"acc_stderr\": 0.009673669315476049\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6808188021228203,\n \"acc_stderr\": 0.012840345676251653\n }\n}\n```", "repo_url": "https://huggingface.co/Kquant03/Buttercup-V2-laser", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|arc:challenge|25_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|gsm8k|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hellaswag|10_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T07-34-11.973720.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T07-34-11.973720.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T07-34-11.973720.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T07-34-11.973720.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T07-34-11.973720.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T07-34-11.973720.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["**/details_harness|winogrande|5_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-16T07-34-11.973720.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_16T07_34_11.973720", "path": ["results_2024-02-16T07-34-11.973720.parquet"]}, {"split": "latest", "path": 
["results_2024-02-16T07-34-11.973720.parquet"]}]}]}
2024-02-16T07:36:51+00:00
d51dee2e2b1ec50127a6338e07966b820cf30842
chansung/janus-test-ds
[ "region:us" ]
2024-02-16T07:37:31+00:00
{"dataset_info": {"features": [{"name": "model", "dtype": "string"}, {"name": "conversations", "list": [{"name": "assistant", "dtype": "string"}, {"name": "user", "dtype": "string"}]}, {"name": "type", "dtype": "string"}, {"name": "category", "dtype": "string"}, {"name": "conversation", "list": [{"name": "assistant", "dtype": "string"}, {"name": "user", "dtype": "string"}]}], "splits": [{"name": "train", "num_bytes": 96757.43396226416, "num_examples": 49}], "download_size": 54382, "dataset_size": 96757.43396226416}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-16T08:02:29+00:00
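The entry above stores each example's dialogue as a list of `{user, assistant}` turn pairs under the `conversations` feature. A minimal usage sketch (not part of the original card; field names are taken from the feature list in the metadata above, and the dataset is assumed to be publicly loadable):

```python
from datasets import load_dataset

# Sketch only: repo id, split name, and the "conversations" / "user" /
# "assistant" field names come from the chansung/janus-test-ds entry above.
ds = load_dataset("chansung/janus-test-ds", split="train")

row = ds[0]
for turn in row["conversations"]:
    print("USER:", turn["user"])
    print("ASSISTANT:", turn["assistant"])
```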
2591781a02b2c91ad1bea58d1bbf9bbebaddcdb1
benayas/banking_augmented_5pct_v1
[ "region:us" ]
2024-02-16T07:54:37+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "category", "dtype": "string"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 1025004, "num_examples": 10003}], "download_size": 399224, "dataset_size": 1025004}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-02-16T07:54:40+00:00
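The entry above exposes a single `default` config with a `train` split, so it can be pulled directly with the `datasets` library. The snippet below is a hedged usage sketch (not part of the original card); the column names and row count follow the feature list and split sizes in the metadata above.

```python
from datasets import load_dataset

# Minimal sketch: load the "train" split of the default config listed in the
# metadata above for benayas/banking_augmented_5pct_v1.
ds = load_dataset("benayas/banking_augmented_5pct_v1", split="train")

print(ds.num_rows)      # metadata above reports 10003 examples
print(ds.column_names)  # expected: ["text", "category", "__index_level_0__"]
print(ds[0]["text"], "->", ds[0]["category"])
```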