sha (string, length 40) | text (string, length 0 to 13.4M) | id (string, length 2 to 117) | tags (sequence) | created_at (string, length 25) | metadata (string, length 2 to 31.7M) | last_modified (string, length 25) |
---|---|---|---|---|---|---|
20f04e881aacf450130d583c1b419a15ace819b0 | | Recag/Rp_CommonC_642_2 | ["region:us"] | 2024-02-15T12:34:06+00:00 | {} | 2024-02-15T12:41:12+00:00 |
6517c306f12aa79682c5160edcee533a425db182 | | Recag/Rp_CommonC_643_1 | ["region:us"] | 2024-02-15T12:34:23+00:00 | {} | 2024-02-15T12:44:05+00:00 |
2429241b027debaed01c3bfa820d0556ba02d7cb | | Recag/Rp_CommonC_643_2 | ["region:us"] | 2024-02-15T12:34:28+00:00 | {} | 2024-02-15T12:42:20+00:00 |
628e80cfeb4515fc0260e165338ace46cde7a218 | | Recag/Rp_CommonC_644_1 | ["region:us"] | 2024-02-15T12:36:52+00:00 | {} | 2024-02-15T12:44:12+00:00 |
a667f98ee88117648bc947e070536159ad3af88a | | Recag/Rp_CommonC_644_2 | ["region:us"] | 2024-02-15T12:36:58+00:00 | {} | 2024-02-15T12:43:19+00:00 |
ca5d9fbf7e05f84c04bd0342f402d21e1745dfb6 | | Recag/Rp_CommonC_730_2 | ["region:us"] | 2024-02-15T12:40:34+00:00 | {} | 2024-02-15T12:54:22+00:00 |
ff786f140106ac1360bd41505a4dbaf549269d54 | | Recag/Rp_CommonC_730_3 | ["region:us"] | 2024-02-15T12:46:44+00:00 | {} | 2024-02-15T12:57:48+00:00 |
9bdcb1172eaad5cc0ad5ab03f3be27cdf3ba9f1e | | Recag/Rp_CommonC_645_1 | ["region:us"] | 2024-02-15T12:47:39+00:00 | {} | 2024-02-15T12:58:06+00:00 |
9b8b911ea0b91a382c43b225178a0664bfb02eef | | Recag/Rp_CommonC_645_2 | ["region:us"] | 2024-02-15T12:47:45+00:00 | {} | 2024-02-15T12:54:51+00:00 |
1fa58a20e62c3f8b7b1a1e2f2a5b5cb8feba7f1e | | Recag/Rp_CommonC_646_1 | ["region:us"] | 2024-02-15T12:47:55+00:00 | {} | 2024-02-15T12:58:13+00:00 |
9a60fd32cd2a7b8356e40c1f039526fab7cb5a6b | | Recag/Rp_CommonC_646_2 | ["region:us"] | 2024-02-15T12:48:06+00:00 | {} | 2024-02-15T13:00:03+00:00 |
d56fc03aae388640e617698d1095c92dcdcc501b | | Recag/Rp_CommonC_647_1 | ["region:us"] | 2024-02-15T12:48:22+00:00 | {} | 2024-02-15T13:00:10+00:00 |
2b7f6a8f3b94506195ea914ec5ef7b078683a3bc | | Recag/Rp_CommonC_647_2 | ["region:us"] | 2024-02-15T12:48:32+00:00 | {} | 2024-02-15T12:59:00+00:00 |
47ebd35f35e7cb722f1a16f376b85983970dcab3 | | Recag/Rp_CommonC_648_1 | ["region:us"] | 2024-02-15T12:48:44+00:00 | {} | 2024-02-15T12:59:44+00:00 |
e0357b760e57f87d5d1bea4f736d01191f5b496d | | Recag/Rp_CommonC_648_2 | ["region:us"] | 2024-02-15T12:48:52+00:00 | {} | 2024-02-15T12:59:53+00:00 |
afddddeb8af4b171abbd7ff201767f7101fb0dae | | Recag/Rp_CommonC_731_1 | ["region:us"] | 2024-02-15T12:55:58+00:00 | {} | 2024-02-15T13:03:57+00:00 |
df514ff8f0d6676791657bb38de82fc704d2ed41 | | Recag/Rp_CommonC_731_2 | ["region:us"] | 2024-02-15T12:57:00+00:00 | {} | 2024-02-15T13:06:59+00:00 |
00baa3a534c7c125c8c7877c67781426b09573f7 | | Recag/Rp_CommonC_731_3 | ["region:us"] | 2024-02-15T12:57:34+00:00 | {} | 2024-02-15T13:10:05+00:00 |
dc4552fb66646087c68ff5095278e9f115c67f27 | | Recag/Rp_CommonC_732_1 | ["region:us"] | 2024-02-15T12:57:56+00:00 | {} | 2024-02-15T13:13:29+00:00 |
c9320cd7bd35b529bcada6f94461da3334da1df9 | | Recag/Rp_CommonC_732_2 | ["region:us"] | 2024-02-15T12:58:07+00:00 | {} | 2024-02-15T13:21:31+00:00 |
d90909d43d1f6d163cc73928358ba42bddbc4f4c | | Recag/Rp_CommonC_732_3 | ["region:us"] | 2024-02-15T12:58:19+00:00 | {} | 2024-02-15T13:18:28+00:00 |
8c1aafc9eafc9e5262322eb419666db8f5d6b6cd | | Recag/Rp_CommonC_733_1 | ["region:us"] | 2024-02-15T12:58:32+00:00 | {} | 2024-02-15T13:24:32+00:00 |
83289fb676081955814fde60e7ae009aaf8e14ab | | wuchiyongshi/sentiment | ["region:us"] | 2024-02-15T12:58:44+00:00 | {} | 2024-02-15T13:00:17+00:00 |
13a72c0340980e93c40feaadd6301e36bdd463fd | | Recag/Rp_CommonC_733_2 | ["region:us"] | 2024-02-15T12:58:45+00:00 | {} | 2024-02-15T13:27:53+00:00 |
ee881d8474e7c97d2898be7cf1a62b19be16b6f0 | | Recag/Rp_CommonC_733_3 | ["region:us"] | 2024-02-15T12:58:56+00:00 | {} | 2024-02-15T13:31:17+00:00 |
79d6fd77b333cb7b9c20d32b47542507be26cfb6 | | Recag/Rp_CommonC_734_1 | ["region:us"] | 2024-02-15T12:59:13+00:00 | {} | 2024-02-15T13:36:05+00:00 |
72e539e63f9846b0f533f5bba739ef87ba0e1703 | | Recag/Rp_CommonC_734_2 | ["region:us"] | 2024-02-15T12:59:31+00:00 | {} | 2024-02-15T13:39:12+00:00 |
1de175acf09b6f0b4f147af4125eb09ef1dd430a | | Recag/Rp_CommonC_734_3 | ["region:us"] | 2024-02-15T12:59:43+00:00 | {} | 2024-02-15T13:41:54+00:00 |
ef3dc0a4c66fafd866d0840bf05c1249d6ca9db2 | | Recag/Rp_CommonC_735_1 | ["region:us"] | 2024-02-15T12:59:56+00:00 | {} | 2024-02-15T13:44:26+00:00 |
5dd967266b52be7db53e39e9de0a0b229b4fa6e8 | | Recag/Rp_CommonC_735_2 | ["region:us"] | 2024-02-15T13:00:14+00:00 | {} | 2024-02-15T13:49:43+00:00 |
88a47d0e3f98cf9bd66545af02a815072aa195b0 | | Recag/Rp_CommonC_735_3 | ["region:us"] | 2024-02-15T13:00:25+00:00 | {} | 2024-02-15T13:49:13+00:00 |
a1662e98c19a9db2c03ffb2dce93749f6af63513 | | shkocs/uploads | ["region:us"] | 2024-02-15T13:07:10+00:00 | {} | 2024-02-15T13:07:10+00:00 |
d0d9c31310f20e269bb2c4ac7d13e16a312144ce | | Recag/Rp_CommonC_649_1 | ["region:us"] | 2024-02-15T13:18:17+00:00 | {} | 2024-02-15T13:31:39+00:00 |
eccb355b0c8c8ddd876f0dfe7c6690b863c5fa24 | | Recag/Rp_CommonC_649_2 | ["region:us"] | 2024-02-15T13:18:50+00:00 | {} | 2024-02-15T13:29:01+00:00 |
98b3c3a4520cda946cdb9108b352416f67a183f4 | | Recag/Rp_CommonC_650_1 | ["region:us"] | 2024-02-15T13:19:05+00:00 | {} | 2024-02-15T13:32:30+00:00 |
3682166e946975bec0c659afced6db375786cd12 | | Recag/Rp_CommonC_650_2 | ["region:us"] | 2024-02-15T13:19:13+00:00 | {} | 2024-02-15T13:30:20+00:00 |
30c61e9ecf855123c19d6661f35a85fc27e62775 | | Recag/Rp_CommonC_651_1 | ["region:us"] | 2024-02-15T13:19:31+00:00 | {} | 2024-02-15T13:32:46+00:00 |
9cd7efb8f3aa2218cc9f0e5708897ff93aa42ecd | | Recag/Rp_CommonC_651_2 | ["region:us"] | 2024-02-15T13:19:41+00:00 | {} | 2024-02-15T13:32:06+00:00 |
1bab63119dc083f3791f3205368586c346190d61 | | rezaalifilmm/TEST | ["region:us"] | 2024-02-15T13:39:48+00:00 | {} | 2024-02-15T13:39:48+00:00 |
f70b429e78acfb63da8e7830583e61c67e152190 | | Recag/Rp_CommonC_652_1 | ["region:us"] | 2024-02-15T13:40:41+00:00 | {} | 2024-02-15T13:52:04+00:00 |
b984a17b5ed88e83379e36419e59acb0244094ea | | Recag/Rp_CommonC_652_2 | ["region:us"] | 2024-02-15T13:40:47+00:00 | {} | 2024-02-15T13:50:24+00:00 |
c99fb3f46be9e48e07220df84084c60c5117bb8f | | Recag/Rp_CommonC_653_1 | ["region:us"] | 2024-02-15T13:41:00+00:00 | {} | 2024-02-15T13:52:50+00:00 |
ca44b6e0779d31d110e68e3312b636c1fa1c988d | | Recag/Rp_CommonC_653_2 | ["region:us"] | 2024-02-15T13:41:06+00:00 | {} | 2024-02-15T13:52:21+00:00 |
afcf8ef00427e554c268914f3b6192301fb6d67b | | Recag/Rp_CommonC_654_1 | ["region:us"] | 2024-02-15T13:41:21+00:00 | {} | 2024-02-15T13:52:57+00:00 |
60c31abc353ea3234a2c2b00e21a4861d1edb345 | | Recag/Rp_CommonC_654_2 | ["region:us"] | 2024-02-15T13:41:26+00:00 | {} | 2024-02-15T13:52:22+00:00 |
e8cb02a0c1f1dbda2dea816c396acb8b9da9c42d | | Recag/Rp_CommonC_736_1 | ["region:us"] | 2024-02-15T13:49:53+00:00 | {} | 2024-02-15T13:52:50+00:00 |
aa7d685688653806dc58eae260c5f8220e870640 | | Recag/Rp_CommonC_736_2 | ["region:us"] | 2024-02-15T13:50:02+00:00 | {} | 2024-02-15T13:57:22+00:00 |
6f07b44bfc41de18ac855c1c7d3ea96d677e781c | | Recag/Rp_CommonC_736_3 | ["region:us"] | 2024-02-15T13:50:49+00:00 | {} | 2024-02-15T13:57:35+00:00 |
d0d5930d76c2c938f890acc163d9ea4b7bc7a259 | | Recag/Rp_CommonC_737_1 | ["region:us"] | 2024-02-15T13:54:39+00:00 | {} | 2024-02-16T12:49:35+00:00 |
5fd36a774653bca7ceba86bb6546fb961e0a33bb | | SaramNick/ruwhisper_test | ["region:us"] | 2024-02-15T13:54:39+00:00 | {} | 2024-02-15T15:39:40+00:00 |
5699d075423f04a9f38b38daac18469707c6871d | | Recag/Rp_CommonC_737_3 | ["region:us"] | 2024-02-15T13:55:31+00:00 | {} | 2024-02-16T12:52:39+00:00 |
c1bc0206ca1660676144f3fd94b5fc0980d466a0 | | moaminsharifi/Churn_Modelling | ["region:us"] | 2024-02-15T13:56:29+00:00 | {} | 2024-02-15T13:57:04+00:00 |
0de27a30052a7466d4dd823ac4bdafce0e3d361c | | Recag/Rp_CommonC_655_1 | ["region:us"] | 2024-02-15T13:58:45+00:00 | {} | 2024-02-15T14:03:39+00:00 |
57d402f3b210e83461f6d2a4b5823d242d9a7958 | | Recag/Rp_CommonC_655_2 | ["region:us"] | 2024-02-15T13:58:58+00:00 | {} | 2024-02-15T14:02:33+00:00 |
3d14f087ad2a3124a15886a2e63114db784ccfb1 | | Recag/Rp_CommonC_657_1 | ["region:us"] | 2024-02-15T14:00:10+00:00 | {} | 2024-02-15T14:04:28+00:00 |
1be8fa6fe2650198dc47c37379985feaf24323c9 | | Recag/Rp_CommonC_657_2 | ["region:us"] | 2024-02-15T14:00:20+00:00 | {} | 2024-02-15T14:03:56+00:00 |
8c8cb2a16e6374087aab2383a78721d42fe9c583 | | Damodaran/demoTestSetDonut | ["region:us"] | 2024-02-15T14:23:11+00:00 | {} | 2024-02-15T14:23:11+00:00 |
d742c6dc08a9531c161bffb15f4425d4a054ddc0 | | huggingface/figma-Playground-Inference-for-PRO-s-Website | ["region:us"] | 2024-02-15T14:40:52+00:00 | {} | 2024-02-15T14:40:52+00:00 |
74d48f36edafd0ca1dd60062c99386d3877bfb5f | | Orenbac/amz-press-release_summarized | ["region:us"] | 2024-02-15T14:59:38+00:00 | {} | 2024-02-17T14:06:31+00:00 |
adb49ea9bc6a37cda5be4b18f28b2ee8582045c8 | | AlisaMenekse/ErrorCategoriesBCP_10k_rows | ["region:us"] | 2024-02-15T15:11:18+00:00 | {} | 2024-02-15T15:13:06+00:00 |
e6f6cc9f294d34072a1f674fe81c4afb16d5f192 | | enzostvs/figma-plugin-export-frame-to-url | ["region:us"] | 2024-02-15T15:26:49+00:00 | {} | 2024-02-15T16:52:02+00:00 |
eb5a7ceb7b4a6a47d57e013a808e7bbf55c56168 | | Drewskidang/mix_genral | ["region:us"] | 2024-02-15T15:32:06+00:00 | {} | 2024-02-16T14:51:20+00:00 |
da9c5a516eb71c8da438a53421797a09b95af31a | | Drewskidang/ragcomparison | ["region:us"] | 2024-02-15T15:46:26+00:00 | {} | 2024-02-15T15:46:38+00:00 |
728d5e4963056605837900741d2aed6755060fd1 | | Avinier/docker-llm-conversations-v2 | ["region:us"] | 2024-02-15T16:03:36+00:00 | {} | 2024-02-15T16:04:13+00:00 |
737e6dda9b5f5f056ad1340d4e33483cb2aaa9b2 | | mecxlan/brain_images | ["region:us"] | 2024-02-15T16:48:30+00:00 | {} | 2024-02-15T16:49:22+00:00 |
9e7b96501ec9087337b6e9fa9cd935eb2dfc8283 | | Instincts003/p2t-dataset | ["region:us"] | 2024-02-15T17:00:55+00:00 | {} | 2024-02-15T17:19:22+00:00 |
17c8e6800e238d4e671cff3c7a6cf5c018eda051 | | oliverbob/rev1 | ["region:us"] | 2024-02-15T17:25:26+00:00 | {} | 2024-02-16T10:11:58+00:00 |
c2d8b504a85e32db96bf1c893e4bef6c540020b6 | | Bazou/BoobAI | ["region:us"] | 2024-02-15T17:50:39+00:00 | {} | 2024-02-15T17:51:06+00:00 |
e2f7874bf152a4932ce74a0e124391134bf7b520 | | AsemBadr/Al-Rahman | ["region:us"] | 2024-02-15T18:03:34+00:00 | {} | 2024-02-15T21:06:22+00:00 |
af85f3cec7a812a827474f27a2fa6dfa9665841d | | toninhodjj/niki | ["region:us"] | 2024-02-15T18:03:52+00:00 | {} | 2024-02-15T18:08:28+00:00 |
f85503d964c24a6141b4681735961f52f2858b2e | | rodrigotborges/superaudio | ["region:us"] | 2024-02-15T18:11:52+00:00 | {} | 2024-02-15T18:15:11+00:00 |
e6961f00e5bbefbd4433026a2f17cc7cd4922135 | | crncskn/try123 | ["region:us"] | 2024-02-15T18:17:46+00:00 | {} | 2024-02-15T18:20:45+00:00 |
08326f3ca8807568aa57f76f655fd3ac6f100a75 | | rodrigotborges/superaudio2 | ["region:us"] | 2024-02-15T18:36:22+00:00 | {} | 2024-02-15T18:37:20+00:00 |
91f1104f569a4c06786acb1bae46000e75e13305 | | sasha/co2_models | ["region:us"] | 2024-02-15T18:48:08+00:00 | {} | 2024-02-16T22:43:01+00:00 |
4b15137b7509b9a35dac808b11cc9ae8ce477ad1 | | siranli/state-extract | ["region:us"] | 2024-02-15T18:53:58+00:00 | {} | 2024-02-15T18:54:33+00:00 |
31447d72d83e39a1f0fd3ac41c07da7ba1c9344e | | grahvi4545/myVectorStore | ["license:apache-2.0", "region:us"] | 2024-02-15T19:46:18+00:00 | {"license": "apache-2.0"} | 2024-02-15T19:46:18+00:00 |
f4e9c7d0423e55e23454f4fd2054efb0e5f0a3c9 | | argilla/OpenHermes-2.5-dpo-ckpt2-auto | ["region:us"] | 2024-02-15T19:49:04+00:00 | {"dataset_info": {"features": [{"name": "conversations", "list": [{"name": "from", "dtype": "string"}, {"name": "value", "dtype": "string"}, {"name": "weight", "dtype": "float64"}]}, {"name": "input", "dtype": "string"}, {"name": "generation_model", "sequence": "string"}, {"name": "generation_prompt", "sequence": "string"}, {"name": "raw_generation_responses", "sequence": "string"}, {"name": "generations", "sequence": "string"}], "splits": [{"name": "train", "num_bytes": 1181907830, "num_examples": 207569}], "download_size": 576131972, "dataset_size": 1181907830}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T11:20:48+00:00 |
09dfd53b92a4990d45272db95f58ccf6b7f38c5a | | davidyoungoc/mini-platypus | ["region:us"] | 2024-02-15T19:50:59+00:00 | {"dataset_info": {"features": [{"name": "instruction", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 4186564, "num_examples": 1000}], "download_size": 2245921, "dataset_size": 4186564}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-15T19:52:33+00:00 |
365f1eec822457d5c032b7308bd47b6bce4313c7 | | Batraquio1234/Batraquio | ["region:us"] | 2024-02-15T19:52:54+00:00 | {} | 2024-02-15T20:03:15+00:00 |
4def4661f8501e1da652cd9b5e185dbabb93d7fb | https://huggingface.co/datasets/mhenrichsen/context-aware-splits-english | PocketDoc/text-splitter-alpaca | ["task_categories:text-generation", "language:en", "region:us"] | 2024-02-15T19:59:32+00:00 | {"language": ["en"], "task_categories": ["text-generation"]} | 2024-02-16T22:27:17+00:00 |
f789247e23bc4bc0fbd364185891bde193655194 | | alisson40889/domiro | ["license:openrail", "region:us"] | 2024-02-15T20:05:13+00:00 | {"license": "openrail"} | 2024-02-15T20:06:27+00:00 |
4f3df1e4e44270f748866fc8fd7f5a6c60949008 | | loubnabnl/math_gradeschool | ["region:us"] | 2024-02-15T20:18:16+00:00 | {"dataset_info": {"features": [{"name": "completion", "dtype": "string"}, {"name": "prompt_grade_school", "dtype": "string"}, {"name": "token_length", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 19283248, "num_examples": 5000}], "download_size": 9681102, "dataset_size": 19283248}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-15T20:18:17+00:00 |
3be6278fce61be65736c0adfe3d61ff990fb89e5 | | loubnabnl/math_college | ["region:us"] | 2024-02-15T20:18:45+00:00 | {"dataset_info": {"features": [{"name": "prompt_college", "dtype": "string"}, {"name": "token_length", "dtype": "int64"}, {"name": "completion", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 25108775, "num_examples": 5000}], "download_size": 12716387, "dataset_size": 25108775}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-15T20:18:46+00:00 |
7877c7345408921a8984b64b773d11608cc2974d | | Kiurachi/lggj | ["license:openrail", "region:us"] | 2024-02-15T20:19:43+00:00 | {"license": "openrail"} | 2024-02-15T20:19:44+00:00 |
98aeabdbed04b8e48e1a43763805d5f561c8740d | | marcones/elementar1 | ["license:openrail", "region:us"] | 2024-02-15T20:20:59+00:00 | {"license": "openrail"} | 2024-02-15T20:21:38+00:00 |
1d6ee463376553b7781730c8a95dfad5769e66ec | | VatsaDev/oh2.5-text | ["license:mit", "region:us"] | 2024-02-15T20:24:06+00:00 | {"license": "mit"} | 2024-02-16T16:32:49+00:00 |
01261b44a551a41a937f733d650a9c42594fe3fb | | macadeliccc/distilabel-neurology-instructions | ["region:us"] | 2024-02-15T20:24:57+00:00 | {"dataset_info": {"features": [{"name": "instructions", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 372401, "num_examples": 4000}], "download_size": 96796, "dataset_size": 372401}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-15T20:24:59+00:00 |
33a1e962efc1668084958d77d16354acef1d7746 |
# Dataset Card for Evaluation run of louisbrulenaudet/Pearl-34B-ties
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [louisbrulenaudet/Pearl-34B-ties](https://huggingface.co/louisbrulenaudet/Pearl-34B-ties) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_louisbrulenaudet__Pearl-34B-ties",
"harness_winogrande_5",
split="train")
```
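The aggregated metrics described above can be loaded the same way. A minimal sketch: the configuration name "results" comes from the description above, while the "latest" split name is assumed to follow the split-naming convention visible in this dataset's configuration metadata and may need adjusting:
```python
from datasets import load_dataset

# "results" is the aggregated-results configuration mentioned above;
# "latest" is assumed to point at the most recent run (adjust if needed).
results = load_dataset(
    "open-llm-leaderboard/details_louisbrulenaudet__Pearl-34B-ties",
    "results",
    split="latest",
)
print(results[0])
```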
## Latest results
These are the [latest results from run 2024-02-15T20:29:21.982361](https://huggingface.co/datasets/open-llm-leaderboard/details_louisbrulenaudet__Pearl-34B-ties/blob/main/results_2024-02-15T20-29-21.982361.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each of them in the results and in the "latest" split for each eval):
```python
{
"all": {
"acc": 0.7624896367346236,
"acc_stderr": 0.02823253317418589,
"acc_norm": 0.7667330036075873,
"acc_norm_stderr": 0.028764116967369732,
"mc1": 0.5336597307221542,
"mc1_stderr": 0.017463793867168106,
"mc2": 0.7032022498819784,
"mc2_stderr": 0.014189265275795037
},
"harness|arc:challenge|25": {
"acc": 0.6791808873720137,
"acc_stderr": 0.01364094309194653,
"acc_norm": 0.7098976109215017,
"acc_norm_stderr": 0.013261573677520767
},
"harness|hellaswag|10": {
"acc": 0.6525592511451902,
"acc_stderr": 0.004751840646730855,
"acc_norm": 0.8483369846644094,
"acc_norm_stderr": 0.0035796087435066093
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.47,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.47,
"acc_norm_stderr": 0.05016135580465919
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.7481481481481481,
"acc_stderr": 0.03749850709174021,
"acc_norm": 0.7481481481481481,
"acc_norm_stderr": 0.03749850709174021
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.875,
"acc_stderr": 0.026913523521537846,
"acc_norm": 0.875,
"acc_norm_stderr": 0.026913523521537846
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.79,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.8,
"acc_stderr": 0.024618298195866518,
"acc_norm": 0.8,
"acc_norm_stderr": 0.024618298195866518
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.8958333333333334,
"acc_stderr": 0.025545239210256917,
"acc_norm": 0.8958333333333334,
"acc_norm_stderr": 0.025545239210256917
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001974
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.7514450867052023,
"acc_stderr": 0.03295304696818318,
"acc_norm": 0.7514450867052023,
"acc_norm_stderr": 0.03295304696818318
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.5686274509803921,
"acc_stderr": 0.04928099597287534,
"acc_norm": 0.5686274509803921,
"acc_norm_stderr": 0.04928099597287534
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.8,
"acc_stderr": 0.04020151261036845,
"acc_norm": 0.8,
"acc_norm_stderr": 0.04020151261036845
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.7787234042553192,
"acc_stderr": 0.027136349602424056,
"acc_norm": 0.7787234042553192,
"acc_norm_stderr": 0.027136349602424056
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5964912280701754,
"acc_stderr": 0.04615186962583707,
"acc_norm": 0.5964912280701754,
"acc_norm_stderr": 0.04615186962583707
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.7379310344827587,
"acc_stderr": 0.036646663372252565,
"acc_norm": 0.7379310344827587,
"acc_norm_stderr": 0.036646663372252565
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.7248677248677249,
"acc_stderr": 0.023000086859068642,
"acc_norm": 0.7248677248677249,
"acc_norm_stderr": 0.023000086859068642
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.6111111111111112,
"acc_stderr": 0.04360314860077459,
"acc_norm": 0.6111111111111112,
"acc_norm_stderr": 0.04360314860077459
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.56,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.56,
"acc_norm_stderr": 0.04988876515698589
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.9032258064516129,
"acc_stderr": 0.016818943416345197,
"acc_norm": 0.9032258064516129,
"acc_norm_stderr": 0.016818943416345197
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.6403940886699507,
"acc_stderr": 0.03376458246509567,
"acc_norm": 0.6403940886699507,
"acc_norm_stderr": 0.03376458246509567
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.8,
"acc_stderr": 0.04020151261036846,
"acc_norm": 0.8,
"acc_norm_stderr": 0.04020151261036846
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.8666666666666667,
"acc_stderr": 0.026544435312706467,
"acc_norm": 0.8666666666666667,
"acc_norm_stderr": 0.026544435312706467
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.9292929292929293,
"acc_stderr": 0.018263105420199488,
"acc_norm": 0.9292929292929293,
"acc_norm_stderr": 0.018263105420199488
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9740932642487047,
"acc_stderr": 0.011464523356953162,
"acc_norm": 0.9740932642487047,
"acc_norm_stderr": 0.011464523356953162
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.8153846153846154,
"acc_stderr": 0.01967163241310029,
"acc_norm": 0.8153846153846154,
"acc_norm_stderr": 0.01967163241310029
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.45555555555555555,
"acc_stderr": 0.03036486250482443,
"acc_norm": 0.45555555555555555,
"acc_norm_stderr": 0.03036486250482443
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.8361344537815126,
"acc_stderr": 0.024044054940440488,
"acc_norm": 0.8361344537815126,
"acc_norm_stderr": 0.024044054940440488
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.5231788079470199,
"acc_stderr": 0.04078093859163085,
"acc_norm": 0.5231788079470199,
"acc_norm_stderr": 0.04078093859163085
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.9211009174311927,
"acc_stderr": 0.011558198113769584,
"acc_norm": 0.9211009174311927,
"acc_norm_stderr": 0.011558198113769584
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.6527777777777778,
"acc_stderr": 0.032468872436376486,
"acc_norm": 0.6527777777777778,
"acc_norm_stderr": 0.032468872436376486
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.9215686274509803,
"acc_stderr": 0.018869514646658928,
"acc_norm": 0.9215686274509803,
"acc_norm_stderr": 0.018869514646658928
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.9071729957805907,
"acc_stderr": 0.01888975055095671,
"acc_norm": 0.9071729957805907,
"acc_norm_stderr": 0.01888975055095671
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.7892376681614349,
"acc_stderr": 0.02737309550054019,
"acc_norm": 0.7892376681614349,
"acc_norm_stderr": 0.02737309550054019
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8778625954198473,
"acc_stderr": 0.028718776889342323,
"acc_norm": 0.8778625954198473,
"acc_norm_stderr": 0.028718776889342323
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8842975206611571,
"acc_stderr": 0.02919980245562281,
"acc_norm": 0.8842975206611571,
"acc_norm_stderr": 0.02919980245562281
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8981481481481481,
"acc_stderr": 0.02923927267563275,
"acc_norm": 0.8981481481481481,
"acc_norm_stderr": 0.02923927267563275
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.8711656441717791,
"acc_stderr": 0.02632138319878367,
"acc_norm": 0.8711656441717791,
"acc_norm_stderr": 0.02632138319878367
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.5357142857142857,
"acc_stderr": 0.04733667890053756,
"acc_norm": 0.5357142857142857,
"acc_norm_stderr": 0.04733667890053756
},
"harness|hendrycksTest-management|5": {
"acc": 0.883495145631068,
"acc_stderr": 0.03176683948640406,
"acc_norm": 0.883495145631068,
"acc_norm_stderr": 0.03176683948640406
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.9444444444444444,
"acc_stderr": 0.015006312806446912,
"acc_norm": 0.9444444444444444,
"acc_norm_stderr": 0.015006312806446912
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.89,
"acc_stderr": 0.03144660377352203,
"acc_norm": 0.89,
"acc_norm_stderr": 0.03144660377352203
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.909323116219668,
"acc_stderr": 0.010268429662528548,
"acc_norm": 0.909323116219668,
"acc_norm_stderr": 0.010268429662528548
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.8208092485549133,
"acc_stderr": 0.020647590029679332,
"acc_norm": 0.8208092485549133,
"acc_norm_stderr": 0.020647590029679332
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.8055865921787709,
"acc_stderr": 0.013235808096742286,
"acc_norm": 0.8055865921787709,
"acc_norm_stderr": 0.013235808096742286
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.8398692810457516,
"acc_stderr": 0.020998740930362303,
"acc_norm": 0.8398692810457516,
"acc_norm_stderr": 0.020998740930362303
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.797427652733119,
"acc_stderr": 0.02282731749105969,
"acc_norm": 0.797427652733119,
"acc_norm_stderr": 0.02282731749105969
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.8703703703703703,
"acc_stderr": 0.018689725721062075,
"acc_norm": 0.8703703703703703,
"acc_norm_stderr": 0.018689725721062075
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.6276595744680851,
"acc_stderr": 0.02883892147125145,
"acc_norm": 0.6276595744680851,
"acc_norm_stderr": 0.02883892147125145
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.5808344198174706,
"acc_stderr": 0.012602244505788228,
"acc_norm": 0.5808344198174706,
"acc_norm_stderr": 0.012602244505788228
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.8235294117647058,
"acc_stderr": 0.023157468308559342,
"acc_norm": 0.8235294117647058,
"acc_norm_stderr": 0.023157468308559342
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.815359477124183,
"acc_stderr": 0.01569702924075778,
"acc_norm": 0.815359477124183,
"acc_norm_stderr": 0.01569702924075778
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.7272727272727273,
"acc_stderr": 0.04265792110940589,
"acc_norm": 0.7272727272727273,
"acc_norm_stderr": 0.04265792110940589
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.8489795918367347,
"acc_stderr": 0.022923004094736854,
"acc_norm": 0.8489795918367347,
"acc_norm_stderr": 0.022923004094736854
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8955223880597015,
"acc_stderr": 0.021628920516700643,
"acc_norm": 0.8955223880597015,
"acc_norm_stderr": 0.021628920516700643
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.9,
"acc_stderr": 0.030151134457776334,
"acc_norm": 0.9,
"acc_norm_stderr": 0.030151134457776334
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5843373493975904,
"acc_stderr": 0.03836722176598053,
"acc_norm": 0.5843373493975904,
"acc_norm_stderr": 0.03836722176598053
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8830409356725146,
"acc_stderr": 0.024648068961366152,
"acc_norm": 0.8830409356725146,
"acc_norm_stderr": 0.024648068961366152
},
"harness|truthfulqa:mc|0": {
"mc1": 0.5336597307221542,
"mc1_stderr": 0.017463793867168106,
"mc2": 0.7032022498819784,
"mc2_stderr": 0.014189265275795037
},
"harness|winogrande|5": {
"acc": 0.8263614838200474,
"acc_stderr": 0.010646116480330996
},
"harness|gsm8k|5": {
"acc": 0.6747536012130402,
"acc_stderr": 0.012903904752543913
}
}
```
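The raw results file linked above can also be fetched directly from the dataset repository instead of going through the `datasets` splits. A minimal sketch using `huggingface_hub`; the filename is taken from the link in the "Latest results" section:
```python
import json

from huggingface_hub import hf_hub_download

# Download the results file referenced in the "Latest results" link above.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/details_louisbrulenaudet__Pearl-34B-ties",
    filename="results_2024-02-15T20-29-21.982361.json",
    repo_type="dataset",
)
with open(path) as f:
    results = json.load(f)

# Inspect the top-level structure of the file before digging into task scores.
print(list(results))
```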
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_louisbrulenaudet__Pearl-34B-ties | [
"region:us"
] | 2024-02-15T20:31:45+00:00 | {"pretty_name": "Evaluation run of louisbrulenaudet/Pearl-34B-ties", "dataset_summary": "Dataset automatically created during the evaluation run of model [louisbrulenaudet/Pearl-34B-ties](https://huggingface.co/louisbrulenaudet/Pearl-34B-ties) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_louisbrulenaudet__Pearl-34B-ties\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-15T20:29:21.982361](https://huggingface.co/datasets/open-llm-leaderboard/details_louisbrulenaudet__Pearl-34B-ties/blob/main/results_2024-02-15T20-29-21.982361.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7624896367346236,\n \"acc_stderr\": 0.02823253317418589,\n \"acc_norm\": 0.7667330036075873,\n \"acc_norm_stderr\": 0.028764116967369732,\n \"mc1\": 0.5336597307221542,\n \"mc1_stderr\": 0.017463793867168106,\n \"mc2\": 0.7032022498819784,\n \"mc2_stderr\": 0.014189265275795037\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6791808873720137,\n \"acc_stderr\": 0.01364094309194653,\n \"acc_norm\": 0.7098976109215017,\n \"acc_norm_stderr\": 0.013261573677520767\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6525592511451902,\n \"acc_stderr\": 0.004751840646730855,\n \"acc_norm\": 0.8483369846644094,\n \"acc_norm_stderr\": 0.0035796087435066093\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.7481481481481481,\n \"acc_stderr\": 0.03749850709174021,\n \"acc_norm\": 0.7481481481481481,\n \"acc_norm_stderr\": 0.03749850709174021\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.875,\n \"acc_stderr\": 0.026913523521537846,\n \"acc_norm\": 0.875,\n \"acc_norm_stderr\": 0.026913523521537846\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.024618298195866518,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.024618298195866518\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8958333333333334,\n \"acc_stderr\": 0.025545239210256917,\n \"acc_norm\": 0.8958333333333334,\n \"acc_norm_stderr\": 0.025545239210256917\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 
0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7514450867052023,\n \"acc_stderr\": 0.03295304696818318,\n \"acc_norm\": 0.7514450867052023,\n \"acc_norm_stderr\": 0.03295304696818318\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.5686274509803921,\n \"acc_stderr\": 0.04928099597287534,\n \"acc_norm\": 0.5686274509803921,\n \"acc_norm_stderr\": 0.04928099597287534\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.7787234042553192,\n \"acc_stderr\": 0.027136349602424056,\n \"acc_norm\": 0.7787234042553192,\n \"acc_norm_stderr\": 0.027136349602424056\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5964912280701754,\n \"acc_stderr\": 0.04615186962583707,\n \"acc_norm\": 0.5964912280701754,\n \"acc_norm_stderr\": 0.04615186962583707\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.7379310344827587,\n \"acc_stderr\": 0.036646663372252565,\n \"acc_norm\": 0.7379310344827587,\n \"acc_norm_stderr\": 0.036646663372252565\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.7248677248677249,\n \"acc_stderr\": 0.023000086859068642,\n \"acc_norm\": 0.7248677248677249,\n \"acc_norm_stderr\": 0.023000086859068642\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.6111111111111112,\n \"acc_stderr\": 0.04360314860077459,\n \"acc_norm\": 0.6111111111111112,\n \"acc_norm_stderr\": 0.04360314860077459\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.9032258064516129,\n \"acc_stderr\": 0.016818943416345197,\n \"acc_norm\": 0.9032258064516129,\n \"acc_norm_stderr\": 0.016818943416345197\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.6403940886699507,\n \"acc_stderr\": 0.03376458246509567,\n \"acc_norm\": 0.6403940886699507,\n \"acc_norm_stderr\": 0.03376458246509567\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8666666666666667,\n \"acc_stderr\": 0.026544435312706467,\n \"acc_norm\": 0.8666666666666667,\n \"acc_norm_stderr\": 0.026544435312706467\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.9292929292929293,\n \"acc_stderr\": 0.018263105420199488,\n \"acc_norm\": 0.9292929292929293,\n \"acc_norm_stderr\": 0.018263105420199488\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9740932642487047,\n \"acc_stderr\": 0.011464523356953162,\n \"acc_norm\": 0.9740932642487047,\n \"acc_norm_stderr\": 0.011464523356953162\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.8153846153846154,\n 
\"acc_stderr\": 0.01967163241310029,\n \"acc_norm\": 0.8153846153846154,\n \"acc_norm_stderr\": 0.01967163241310029\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.45555555555555555,\n \"acc_stderr\": 0.03036486250482443,\n \"acc_norm\": 0.45555555555555555,\n \"acc_norm_stderr\": 0.03036486250482443\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.8361344537815126,\n \"acc_stderr\": 0.024044054940440488,\n \"acc_norm\": 0.8361344537815126,\n \"acc_norm_stderr\": 0.024044054940440488\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.5231788079470199,\n \"acc_stderr\": 0.04078093859163085,\n \"acc_norm\": 0.5231788079470199,\n \"acc_norm_stderr\": 0.04078093859163085\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9211009174311927,\n \"acc_stderr\": 0.011558198113769584,\n \"acc_norm\": 0.9211009174311927,\n \"acc_norm_stderr\": 0.011558198113769584\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6527777777777778,\n \"acc_stderr\": 0.032468872436376486,\n \"acc_norm\": 0.6527777777777778,\n \"acc_norm_stderr\": 0.032468872436376486\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9215686274509803,\n \"acc_stderr\": 0.018869514646658928,\n \"acc_norm\": 0.9215686274509803,\n \"acc_norm_stderr\": 0.018869514646658928\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.9071729957805907,\n \"acc_stderr\": 0.01888975055095671,\n \"acc_norm\": 0.9071729957805907,\n \"acc_norm_stderr\": 0.01888975055095671\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7892376681614349,\n \"acc_stderr\": 0.02737309550054019,\n \"acc_norm\": 0.7892376681614349,\n \"acc_norm_stderr\": 0.02737309550054019\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8778625954198473,\n \"acc_stderr\": 0.028718776889342323,\n \"acc_norm\": 0.8778625954198473,\n \"acc_norm_stderr\": 0.028718776889342323\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8842975206611571,\n \"acc_stderr\": 0.02919980245562281,\n \"acc_norm\": 0.8842975206611571,\n \"acc_norm_stderr\": 0.02919980245562281\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8981481481481481,\n \"acc_stderr\": 0.02923927267563275,\n \"acc_norm\": 0.8981481481481481,\n \"acc_norm_stderr\": 0.02923927267563275\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8711656441717791,\n \"acc_stderr\": 0.02632138319878367,\n \"acc_norm\": 0.8711656441717791,\n \"acc_norm_stderr\": 0.02632138319878367\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5357142857142857,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.5357142857142857,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.883495145631068,\n \"acc_stderr\": 0.03176683948640406,\n \"acc_norm\": 0.883495145631068,\n \"acc_norm_stderr\": 0.03176683948640406\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9444444444444444,\n \"acc_stderr\": 0.015006312806446912,\n \"acc_norm\": 0.9444444444444444,\n \"acc_norm_stderr\": 0.015006312806446912\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.89,\n \"acc_stderr\": 0.03144660377352203,\n \"acc_norm\": 0.89,\n \"acc_norm_stderr\": 0.03144660377352203\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.909323116219668,\n \"acc_stderr\": 0.010268429662528548,\n \"acc_norm\": 0.909323116219668,\n 
\"acc_norm_stderr\": 0.010268429662528548\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.8208092485549133,\n \"acc_stderr\": 0.020647590029679332,\n \"acc_norm\": 0.8208092485549133,\n \"acc_norm_stderr\": 0.020647590029679332\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.8055865921787709,\n \"acc_stderr\": 0.013235808096742286,\n \"acc_norm\": 0.8055865921787709,\n \"acc_norm_stderr\": 0.013235808096742286\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8398692810457516,\n \"acc_stderr\": 0.020998740930362303,\n \"acc_norm\": 0.8398692810457516,\n \"acc_norm_stderr\": 0.020998740930362303\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.797427652733119,\n \"acc_stderr\": 0.02282731749105969,\n \"acc_norm\": 0.797427652733119,\n \"acc_norm_stderr\": 0.02282731749105969\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8703703703703703,\n \"acc_stderr\": 0.018689725721062075,\n \"acc_norm\": 0.8703703703703703,\n \"acc_norm_stderr\": 0.018689725721062075\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.6276595744680851,\n \"acc_stderr\": 0.02883892147125145,\n \"acc_norm\": 0.6276595744680851,\n \"acc_norm_stderr\": 0.02883892147125145\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5808344198174706,\n \"acc_stderr\": 0.012602244505788228,\n \"acc_norm\": 0.5808344198174706,\n \"acc_norm_stderr\": 0.012602244505788228\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.8235294117647058,\n \"acc_stderr\": 0.023157468308559342,\n \"acc_norm\": 0.8235294117647058,\n \"acc_norm_stderr\": 0.023157468308559342\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.815359477124183,\n \"acc_stderr\": 0.01569702924075778,\n \"acc_norm\": 0.815359477124183,\n \"acc_norm_stderr\": 0.01569702924075778\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.04265792110940589,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.04265792110940589\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8489795918367347,\n \"acc_stderr\": 0.022923004094736854,\n \"acc_norm\": 0.8489795918367347,\n \"acc_norm_stderr\": 0.022923004094736854\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8955223880597015,\n \"acc_stderr\": 0.021628920516700643,\n \"acc_norm\": 0.8955223880597015,\n \"acc_norm_stderr\": 0.021628920516700643\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.030151134457776334,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.030151134457776334\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5843373493975904,\n \"acc_stderr\": 0.03836722176598053,\n \"acc_norm\": 0.5843373493975904,\n \"acc_norm_stderr\": 0.03836722176598053\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8830409356725146,\n \"acc_stderr\": 0.024648068961366152,\n \"acc_norm\": 0.8830409356725146,\n \"acc_norm_stderr\": 0.024648068961366152\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5336597307221542,\n \"mc1_stderr\": 0.017463793867168106,\n \"mc2\": 0.7032022498819784,\n \"mc2_stderr\": 0.014189265275795037\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8263614838200474,\n \"acc_stderr\": 0.010646116480330996\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6747536012130402,\n \"acc_stderr\": 0.012903904752543913\n }\n}\n```", "repo_url": "https://huggingface.co/louisbrulenaudet/Pearl-34B-ties", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|arc:challenge|25_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|gsm8k|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hellaswag|10_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T20-29-21.982361.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T20-29-21.982361.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-15T20-29-21.982361.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-15T20-29-21.982361.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T20-29-21.982361.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T20-29-21.982361.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["**/details_harness|winogrande|5_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-15T20-29-21.982361.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_15T20_29_21.982361", "path": ["results_2024-02-15T20-29-21.982361.parquet"]}, {"split": "latest", "path": 
["results_2024-02-15T20-29-21.982361.parquet"]}]}]} | 2024-02-15T20:32:19+00:00 |
b019803e0a99af90e6f01dd6d8a4603b018c8541 | mikeg2/vozclaude | [
"license:openrail",
"region:us"
] | 2024-02-15T20:33:39+00:00 | {"license": "openrail"} | 2024-02-15T20:34:04+00:00 |
|
aff8f19ac53f559b73b17b508cbccf0ce7dcca05 | sxandie/arti_kushwaha_cat | [
"region:us"
] | 2024-02-15T20:35:18+00:00 | {} | 2024-02-15T20:45:11+00:00 |
|
f2bdf6ac20a5f53a508bf14aa45662d10a74c65d | rookshanks/small-the_pile | [
"region:us"
] | 2024-02-15T20:36:23+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "meta", "struct": [{"name": "perplexity_score", "dtype": "float64"}, {"name": "pile_set_name", "dtype": "string"}]}], "splits": [{"name": "train", "num_bytes": 484845334.4, "num_examples": 80000}, {"name": "validation", "num_bytes": 60605666.8, "num_examples": 10000}, {"name": "test", "num_bytes": 60605666.8, "num_examples": 10000}], "download_size": 329390472, "dataset_size": 606056667.9999999}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-15T20:36:47+00:00 |
|
5fb3b6308132804ce31daa3cc5629e43837c40a7 |
# Dataset Card for Evaluation run of BarraHome/Wistral-7B-Instruct-v0.4
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [BarraHome/Wistral-7B-Instruct-v0.4](https://huggingface.co/BarraHome/Wistral-7B-Instruct-v0.4) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_BarraHome__Wistral-7B-Instruct-v0.4",
"harness_winogrande_5",
split="train")
```
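To see every available configuration before picking one, the following minimal sketch can help. It is an illustration rather than part of the evaluation pipeline, and it assumes network access to the Hugging Face Hub; `get_dataset_config_names` is a standard helper from the `datasets` library.

```python
from datasets import get_dataset_config_names

# List the per-task configurations (and the aggregated "results" configuration)
# exposed by this repository.
configs = get_dataset_config_names(
    "open-llm-leaderboard/details_BarraHome__Wistral-7B-Instruct-v0.4"
)
print(len(configs), "configurations found")
print(configs[:5])  # a few of the harness_* configuration names
```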
## Latest results
These are the [latest results from run 2024-02-15T20:35:44.878136](https://huggingface.co/datasets/open-llm-leaderboard/details_BarraHome__Wistral-7B-Instruct-v0.4/blob/main/results_2024-02-15T20-35-44.878136.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6032184784518743,
"acc_stderr": 0.03333730204729809,
"acc_norm": 0.607891645213564,
"acc_norm_stderr": 0.03401402537730786,
"mc1": 0.5226438188494492,
"mc1_stderr": 0.01748554225848964,
"mc2": 0.6766513448639357,
"mc2_stderr": 0.015264009667659464
},
"harness|arc:challenge|25": {
"acc": 0.575938566552901,
"acc_stderr": 0.014441889627464392,
"acc_norm": 0.6220136518771331,
"acc_norm_stderr": 0.0141696645203031
},
"harness|hellaswag|10": {
"acc": 0.6612228639713205,
"acc_stderr": 0.004723266971563391,
"acc_norm": 0.8481378211511651,
"acc_norm_stderr": 0.0035815378475817935
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.5777777777777777,
"acc_stderr": 0.04266763404099582,
"acc_norm": 0.5777777777777777,
"acc_norm_stderr": 0.04266763404099582
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.625,
"acc_stderr": 0.039397364351956274,
"acc_norm": 0.625,
"acc_norm_stderr": 0.039397364351956274
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.58,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.58,
"acc_norm_stderr": 0.049604496374885836
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6754716981132075,
"acc_stderr": 0.02881561571343211,
"acc_norm": 0.6754716981132075,
"acc_norm_stderr": 0.02881561571343211
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.6597222222222222,
"acc_stderr": 0.039621355734862175,
"acc_norm": 0.6597222222222222,
"acc_norm_stderr": 0.039621355734862175
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.47,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.47,
"acc_norm_stderr": 0.05016135580465919
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.38,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.38,
"acc_norm_stderr": 0.048783173121456316
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.5838150289017341,
"acc_stderr": 0.03758517775404947,
"acc_norm": 0.5838150289017341,
"acc_norm_stderr": 0.03758517775404947
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.43137254901960786,
"acc_stderr": 0.04928099597287534,
"acc_norm": 0.43137254901960786,
"acc_norm_stderr": 0.04928099597287534
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.72,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.72,
"acc_norm_stderr": 0.04512608598542128
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5148936170212766,
"acc_stderr": 0.03267151848924777,
"acc_norm": 0.5148936170212766,
"acc_norm_stderr": 0.03267151848924777
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.43859649122807015,
"acc_stderr": 0.04668000738510455,
"acc_norm": 0.43859649122807015,
"acc_norm_stderr": 0.04668000738510455
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5724137931034483,
"acc_stderr": 0.041227371113703316,
"acc_norm": 0.5724137931034483,
"acc_norm_stderr": 0.041227371113703316
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.38095238095238093,
"acc_stderr": 0.025010749116137602,
"acc_norm": 0.38095238095238093,
"acc_norm_stderr": 0.025010749116137602
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.3888888888888889,
"acc_stderr": 0.04360314860077459,
"acc_norm": 0.3888888888888889,
"acc_norm_stderr": 0.04360314860077459
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.6774193548387096,
"acc_stderr": 0.026593084516572277,
"acc_norm": 0.6774193548387096,
"acc_norm_stderr": 0.026593084516572277
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.4975369458128079,
"acc_stderr": 0.03517945038691063,
"acc_norm": 0.4975369458128079,
"acc_norm_stderr": 0.03517945038691063
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7212121212121212,
"acc_stderr": 0.03501438706296781,
"acc_norm": 0.7212121212121212,
"acc_norm_stderr": 0.03501438706296781
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7474747474747475,
"acc_stderr": 0.030954055470365897,
"acc_norm": 0.7474747474747475,
"acc_norm_stderr": 0.030954055470365897
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.844559585492228,
"acc_stderr": 0.026148483469153314,
"acc_norm": 0.844559585492228,
"acc_norm_stderr": 0.026148483469153314
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.5564102564102564,
"acc_stderr": 0.0251891498947642,
"acc_norm": 0.5564102564102564,
"acc_norm_stderr": 0.0251891498947642
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.337037037037037,
"acc_stderr": 0.028820884666253255,
"acc_norm": 0.337037037037037,
"acc_norm_stderr": 0.028820884666253255
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.634453781512605,
"acc_stderr": 0.031282177063684614,
"acc_norm": 0.634453781512605,
"acc_norm_stderr": 0.031282177063684614
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3443708609271523,
"acc_stderr": 0.038796870240733264,
"acc_norm": 0.3443708609271523,
"acc_norm_stderr": 0.038796870240733264
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8018348623853211,
"acc_stderr": 0.017090573804217905,
"acc_norm": 0.8018348623853211,
"acc_norm_stderr": 0.017090573804217905
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.4398148148148148,
"acc_stderr": 0.03385177976044812,
"acc_norm": 0.4398148148148148,
"acc_norm_stderr": 0.03385177976044812
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.7549019607843137,
"acc_stderr": 0.03019028245350195,
"acc_norm": 0.7549019607843137,
"acc_norm_stderr": 0.03019028245350195
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7510548523206751,
"acc_stderr": 0.028146970599422644,
"acc_norm": 0.7510548523206751,
"acc_norm_stderr": 0.028146970599422644
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6322869955156951,
"acc_stderr": 0.03236198350928275,
"acc_norm": 0.6322869955156951,
"acc_norm_stderr": 0.03236198350928275
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.6946564885496184,
"acc_stderr": 0.040393149787245605,
"acc_norm": 0.6946564885496184,
"acc_norm_stderr": 0.040393149787245605
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7933884297520661,
"acc_stderr": 0.03695980128098824,
"acc_norm": 0.7933884297520661,
"acc_norm_stderr": 0.03695980128098824
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7037037037037037,
"acc_stderr": 0.04414343666854933,
"acc_norm": 0.7037037037037037,
"acc_norm_stderr": 0.04414343666854933
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7300613496932515,
"acc_stderr": 0.03487825168497892,
"acc_norm": 0.7300613496932515,
"acc_norm_stderr": 0.03487825168497892
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.45535714285714285,
"acc_stderr": 0.04726835553719099,
"acc_norm": 0.45535714285714285,
"acc_norm_stderr": 0.04726835553719099
},
"harness|hendrycksTest-management|5": {
"acc": 0.7475728155339806,
"acc_stderr": 0.04301250399690878,
"acc_norm": 0.7475728155339806,
"acc_norm_stderr": 0.04301250399690878
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8589743589743589,
"acc_stderr": 0.022801382534597552,
"acc_norm": 0.8589743589743589,
"acc_norm_stderr": 0.022801382534597552
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.68,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.68,
"acc_norm_stderr": 0.046882617226215034
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.7739463601532567,
"acc_stderr": 0.014957458504335842,
"acc_norm": 0.7739463601532567,
"acc_norm_stderr": 0.014957458504335842
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.6676300578034682,
"acc_stderr": 0.025361168749688225,
"acc_norm": 0.6676300578034682,
"acc_norm_stderr": 0.025361168749688225
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.34972067039106147,
"acc_stderr": 0.01594930879023364,
"acc_norm": 0.34972067039106147,
"acc_norm_stderr": 0.01594930879023364
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.6797385620915033,
"acc_stderr": 0.02671611838015685,
"acc_norm": 0.6797385620915033,
"acc_norm_stderr": 0.02671611838015685
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6752411575562701,
"acc_stderr": 0.026596782287697043,
"acc_norm": 0.6752411575562701,
"acc_norm_stderr": 0.026596782287697043
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.6759259259259259,
"acc_stderr": 0.02604176620271716,
"acc_norm": 0.6759259259259259,
"acc_norm_stderr": 0.02604176620271716
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.450354609929078,
"acc_stderr": 0.029680105565029036,
"acc_norm": 0.450354609929078,
"acc_norm_stderr": 0.029680105565029036
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.42698826597131684,
"acc_stderr": 0.012633353557534427,
"acc_norm": 0.42698826597131684,
"acc_norm_stderr": 0.012633353557534427
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.5992647058823529,
"acc_stderr": 0.029768263528933105,
"acc_norm": 0.5992647058823529,
"acc_norm_stderr": 0.029768263528933105
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6111111111111112,
"acc_stderr": 0.019722058939618068,
"acc_norm": 0.6111111111111112,
"acc_norm_stderr": 0.019722058939618068
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.7,
"acc_stderr": 0.04389311454644287,
"acc_norm": 0.7,
"acc_norm_stderr": 0.04389311454644287
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7346938775510204,
"acc_stderr": 0.0282638899437846,
"acc_norm": 0.7346938775510204,
"acc_norm_stderr": 0.0282638899437846
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.7512437810945274,
"acc_stderr": 0.030567675938916714,
"acc_norm": 0.7512437810945274,
"acc_norm_stderr": 0.030567675938916714
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.81,
"acc_stderr": 0.039427724440366255,
"acc_norm": 0.81,
"acc_norm_stderr": 0.039427724440366255
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5120481927710844,
"acc_stderr": 0.03891364495835816,
"acc_norm": 0.5120481927710844,
"acc_norm_stderr": 0.03891364495835816
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8245614035087719,
"acc_stderr": 0.029170885500727665,
"acc_norm": 0.8245614035087719,
"acc_norm_stderr": 0.029170885500727665
},
"harness|truthfulqa:mc|0": {
"mc1": 0.5226438188494492,
"mc1_stderr": 0.01748554225848964,
"mc2": 0.6766513448639357,
"mc2_stderr": 0.015264009667659464
},
"harness|winogrande|5": {
"acc": 0.7679558011049724,
"acc_stderr": 0.011864149691827936
},
"harness|gsm8k|5": {
"acc": 0.3957543593631539,
"acc_stderr": 0.013469823701048815
}
}
```
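To work with these aggregated numbers programmatically instead of reading the JSON above, one possible approach is sketched below. It assumes the "results" configuration and "latest" split behave as described earlier in this card.

```python
from datasets import load_dataset

# Load the aggregated results for the latest run; the "results" config and
# "latest" split are the ones referenced earlier in this card.
results = load_dataset(
    "open-llm-leaderboard/details_BarraHome__Wistral-7B-Instruct-v0.4",
    "results",
    split="latest",
)
print(results[0])  # one row holding the aggregated metrics for the latest run
```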
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_BarraHome__Wistral-7B-Instruct-v0.4 | [
"region:us"
] | 2024-02-15T20:38:05+00:00 | {"pretty_name": "Evaluation run of BarraHome/Wistral-7B-Instruct-v0.4", "dataset_summary": "Dataset automatically created during the evaluation run of model [BarraHome/Wistral-7B-Instruct-v0.4](https://huggingface.co/BarraHome/Wistral-7B-Instruct-v0.4) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_BarraHome__Wistral-7B-Instruct-v0.4\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-15T20:35:44.878136](https://huggingface.co/datasets/open-llm-leaderboard/details_BarraHome__Wistral-7B-Instruct-v0.4/blob/main/results_2024-02-15T20-35-44.878136.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6032184784518743,\n \"acc_stderr\": 0.03333730204729809,\n \"acc_norm\": 0.607891645213564,\n \"acc_norm_stderr\": 0.03401402537730786,\n \"mc1\": 0.5226438188494492,\n \"mc1_stderr\": 0.01748554225848964,\n \"mc2\": 0.6766513448639357,\n \"mc2_stderr\": 0.015264009667659464\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.575938566552901,\n \"acc_stderr\": 0.014441889627464392,\n \"acc_norm\": 0.6220136518771331,\n \"acc_norm_stderr\": 0.0141696645203031\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6612228639713205,\n \"acc_stderr\": 0.004723266971563391,\n \"acc_norm\": 0.8481378211511651,\n \"acc_norm_stderr\": 0.0035815378475817935\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5777777777777777,\n \"acc_stderr\": 0.04266763404099582,\n \"acc_norm\": 0.5777777777777777,\n \"acc_norm_stderr\": 0.04266763404099582\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.625,\n \"acc_stderr\": 0.039397364351956274,\n \"acc_norm\": 0.625,\n \"acc_norm_stderr\": 0.039397364351956274\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6754716981132075,\n \"acc_stderr\": 0.02881561571343211,\n \"acc_norm\": 0.6754716981132075,\n \"acc_norm_stderr\": 0.02881561571343211\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6597222222222222,\n \"acc_stderr\": 0.039621355734862175,\n \"acc_norm\": 0.6597222222222222,\n \"acc_norm_stderr\": 0.039621355734862175\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 
0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5838150289017341,\n \"acc_stderr\": 0.03758517775404947,\n \"acc_norm\": 0.5838150289017341,\n \"acc_norm_stderr\": 0.03758517775404947\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.04928099597287534,\n \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.04928099597287534\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5148936170212766,\n \"acc_stderr\": 0.03267151848924777,\n \"acc_norm\": 0.5148936170212766,\n \"acc_norm_stderr\": 0.03267151848924777\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.43859649122807015,\n \"acc_stderr\": 0.04668000738510455,\n \"acc_norm\": 0.43859649122807015,\n \"acc_norm_stderr\": 0.04668000738510455\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.041227371113703316,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.041227371113703316\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.38095238095238093,\n \"acc_stderr\": 0.025010749116137602,\n \"acc_norm\": 0.38095238095238093,\n \"acc_norm_stderr\": 0.025010749116137602\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.04360314860077459,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.04360314860077459\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6774193548387096,\n \"acc_stderr\": 0.026593084516572277,\n \"acc_norm\": 0.6774193548387096,\n \"acc_norm_stderr\": 0.026593084516572277\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4975369458128079,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7212121212121212,\n \"acc_stderr\": 0.03501438706296781,\n \"acc_norm\": 0.7212121212121212,\n \"acc_norm_stderr\": 0.03501438706296781\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7474747474747475,\n \"acc_stderr\": 0.030954055470365897,\n \"acc_norm\": 0.7474747474747475,\n \"acc_norm_stderr\": 0.030954055470365897\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.844559585492228,\n \"acc_stderr\": 0.026148483469153314,\n \"acc_norm\": 0.844559585492228,\n \"acc_norm_stderr\": 0.026148483469153314\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5564102564102564,\n \"acc_stderr\": 0.0251891498947642,\n \"acc_norm\": 0.5564102564102564,\n \"acc_norm_stderr\": 0.0251891498947642\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.337037037037037,\n \"acc_stderr\": 0.028820884666253255,\n \"acc_norm\": 0.337037037037037,\n \"acc_norm_stderr\": 0.028820884666253255\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.634453781512605,\n \"acc_stderr\": 0.031282177063684614,\n \"acc_norm\": 0.634453781512605,\n \"acc_norm_stderr\": 0.031282177063684614\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8018348623853211,\n \"acc_stderr\": 0.017090573804217905,\n \"acc_norm\": 0.8018348623853211,\n \"acc_norm_stderr\": 0.017090573804217905\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4398148148148148,\n \"acc_stderr\": 0.03385177976044812,\n \"acc_norm\": 0.4398148148148148,\n \"acc_norm_stderr\": 0.03385177976044812\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.03019028245350195,\n \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.03019028245350195\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7510548523206751,\n \"acc_stderr\": 0.028146970599422644,\n \"acc_norm\": 0.7510548523206751,\n \"acc_norm_stderr\": 0.028146970599422644\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6322869955156951,\n \"acc_stderr\": 0.03236198350928275,\n \"acc_norm\": 0.6322869955156951,\n \"acc_norm_stderr\": 0.03236198350928275\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6946564885496184,\n \"acc_stderr\": 0.040393149787245605,\n \"acc_norm\": 0.6946564885496184,\n \"acc_norm_stderr\": 0.040393149787245605\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7037037037037037,\n \"acc_stderr\": 0.04414343666854933,\n \"acc_norm\": 0.7037037037037037,\n \"acc_norm_stderr\": 0.04414343666854933\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7300613496932515,\n \"acc_stderr\": 0.03487825168497892,\n \"acc_norm\": 0.7300613496932515,\n \"acc_norm_stderr\": 0.03487825168497892\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n \"acc_stderr\": 0.04726835553719099,\n \"acc_norm\": 0.45535714285714285,\n \"acc_norm_stderr\": 0.04726835553719099\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7475728155339806,\n \"acc_stderr\": 0.04301250399690878,\n \"acc_norm\": 0.7475728155339806,\n \"acc_norm_stderr\": 0.04301250399690878\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n \"acc_stderr\": 0.022801382534597552,\n \"acc_norm\": 0.8589743589743589,\n \"acc_norm_stderr\": 0.022801382534597552\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7739463601532567,\n \"acc_stderr\": 0.014957458504335842,\n \"acc_norm\": 0.7739463601532567,\n \"acc_norm_stderr\": 0.014957458504335842\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6676300578034682,\n \"acc_stderr\": 0.025361168749688225,\n \"acc_norm\": 0.6676300578034682,\n \"acc_norm_stderr\": 0.025361168749688225\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.34972067039106147,\n \"acc_stderr\": 0.01594930879023364,\n \"acc_norm\": 0.34972067039106147,\n \"acc_norm_stderr\": 0.01594930879023364\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6797385620915033,\n \"acc_stderr\": 0.02671611838015685,\n \"acc_norm\": 0.6797385620915033,\n \"acc_norm_stderr\": 0.02671611838015685\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6752411575562701,\n \"acc_stderr\": 0.026596782287697043,\n \"acc_norm\": 0.6752411575562701,\n \"acc_norm_stderr\": 0.026596782287697043\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6759259259259259,\n \"acc_stderr\": 0.02604176620271716,\n \"acc_norm\": 0.6759259259259259,\n \"acc_norm_stderr\": 0.02604176620271716\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.450354609929078,\n \"acc_stderr\": 0.029680105565029036,\n \"acc_norm\": 0.450354609929078,\n \"acc_norm_stderr\": 0.029680105565029036\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.42698826597131684,\n \"acc_stderr\": 0.012633353557534427,\n \"acc_norm\": 0.42698826597131684,\n \"acc_norm_stderr\": 0.012633353557534427\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5992647058823529,\n \"acc_stderr\": 0.029768263528933105,\n \"acc_norm\": 0.5992647058823529,\n \"acc_norm_stderr\": 0.029768263528933105\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6111111111111112,\n \"acc_stderr\": 0.019722058939618068,\n \"acc_norm\": 0.6111111111111112,\n \"acc_norm_stderr\": 0.019722058939618068\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.04389311454644287,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.04389311454644287\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7346938775510204,\n \"acc_stderr\": 0.0282638899437846,\n \"acc_norm\": 0.7346938775510204,\n \"acc_norm_stderr\": 0.0282638899437846\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7512437810945274,\n \"acc_stderr\": 0.030567675938916714,\n \"acc_norm\": 0.7512437810945274,\n \"acc_norm_stderr\": 0.030567675938916714\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.039427724440366255,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.039427724440366255\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5120481927710844,\n \"acc_stderr\": 0.03891364495835816,\n \"acc_norm\": 0.5120481927710844,\n \"acc_norm_stderr\": 0.03891364495835816\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5226438188494492,\n \"mc1_stderr\": 0.01748554225848964,\n \"mc2\": 0.6766513448639357,\n \"mc2_stderr\": 0.015264009667659464\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7679558011049724,\n \"acc_stderr\": 0.011864149691827936\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3957543593631539,\n \"acc_stderr\": 0.013469823701048815\n }\n}\n```", "repo_url": 
"https://huggingface.co/BarraHome/Wistral-7B-Instruct-v0.4", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|arc:challenge|25_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|gsm8k|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hellaswag|10_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T20-35-44.878136.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T20-35-44.878136.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-15T20-35-44.878136.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-15T20-35-44.878136.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T20-35-44.878136.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-15T20-35-44.878136.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["**/details_harness|winogrande|5_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-15T20-35-44.878136.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_15T20_35_44.878136", "path": ["results_2024-02-15T20-35-44.878136.parquet"]}, {"split": "latest", "path": 
["results_2024-02-15T20-35-44.878136.parquet"]}]}]} | 2024-02-15T20:38:32+00:00 |
0cebb47837c041c119e279e1ced85778416ef515 | thecaipirinhachannel/serran | [
"region:us"
] | 2024-02-15T20:38:28+00:00 | {} | 2024-02-15T20:39:18+00:00 |
|
3111b14a735e48aaa9d5f39aaa33856b8aaba4d2 | RadAlienware/test1ultrachat | [
"license:mit",
"region:us"
] | 2024-02-15T20:46:09+00:00 | {"license": "mit", "dataset_info": {"features": [{"name": "Content", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 5722, "num_examples": 1}, {"name": "test", "num_bytes": 5324, "num_examples": 1}], "download_size": 5524, "dataset_size": 11046}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-15T21:00:00+00:00 |
|
2f65c90484ced4b4482a0403eaed26a18fb5765c | YingJie0202/test | [
"region:us"
] | 2024-02-15T20:52:56+00:00 | {} | 2024-02-15T21:20:00+00:00 |
|
7bd1bd18f2807a42d04caddaf7f90ef309f25983 | xPXXX/test_ragas | [
"license:mit",
"region:us"
] | 2024-02-15T21:07:10+00:00 | {"license": "mit"} | 2024-02-16T00:51:44+00:00 |
|
418dfbba1351ac18742a1bc8f7428d5fbc0150c8 |
Dataset for HumanEval-Infilling for Java, based on https://arxiv.org/pdf/2207.14255
| njkumarr/humanevalinfilljava | [
"language:en",
"arxiv:2207.14255",
"region:us"
] | 2024-02-15T21:25:22+00:00 | {"language": ["en"], "pretty_name": "HumanEval-Infilling Java"} | 2024-02-16T07:43:34+00:00 |
8ff91a7ff0389de4e31636e8305fddc3e3a2df35 | unigram/fol-04 | [
"region:us"
] | 2024-02-15T21:29:12+00:00 | {"dataset_info": {"features": [{"name": "premise", "dtype": "string"}, {"name": "hypothesis", "dtype": "string"}, {"name": "label", "dtype": "string"}, {"name": "premise_tptp", "dtype": "string"}, {"name": "hypothesis_tptp", "dtype": "string"}, {"name": "deberta_pred", "dtype": "string"}, {"name": "deberta_pred_r1", "dtype": "string"}, {"name": "deberta_pred_r2", "dtype": "string"}, {"name": "deberta_pred_r3", "dtype": "string"}, {"name": "deberta_pred_r4", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 2197971, "num_examples": 1989}, {"name": "validation", "num_bytes": 387374, "num_examples": 375}, {"name": "test", "num_bytes": 370138, "num_examples": 339}], "download_size": 953363, "dataset_size": 2955483}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-15T21:29:15+00:00 |
|
b12fc65cb8d2fd75335ea8ce2fa64a4be9f8fa7c |
## Dataset Description
- **Repository:** [https://github.com/nlp-uoregon/CulturaX](https://github.com/nlp-uoregon/CulturaX)
- **Papers:** [CulturaX: A Cleaned, Enormous, and Multilingual Dataset for Large Language Models in 167 Languages](https://arxiv.org/abs/2309.09400)
## Dataset Summary
We present CulturaX, a substantial multilingual dataset with 6.3 trillion tokens in 167 languages, tailored for large language model (LLM) development. Our dataset undergoes meticulous cleaning and deduplication through a rigorous pipeline of multiple stages to achieve the best quality for model training, including language identification, URL-based filtering, metric-based cleaning, document refinement, and data deduplication. We employ MinHash at the document level to perform fuzzy deduplication of the datasets in different languages. Our data cleaning framework includes diverse criteria and threshold selections, guided by extensive data samples, ensuring comprehensive noise filtering in various aspects. CulturaX is fully released to the public on Hugging Face to facilitate research and advancements in multilingual LLMs.
Our dataset combines the most recent iteration of mC4 (version 3.1.0) [1] with all accessible OSCAR corpora up to the present year, including 20.19, 21.09, 22.01, and 23.01 [2]. After deep cleaning and deduplication, CulturaX comprises 16TB of data in Parquet format (expanding to 27TB when unpacked). More than half of our dataset is dedicated to non-English languages to significantly boost the data size and enhance the feasibility of training models in multilingual scenarios.
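The document-level fuzzy deduplication mentioned above can be sketched with MinHash plus locality-sensitive hashing. The snippet below is only an illustration using the `datasketch` library; the shingle size, permutation count, and similarity threshold are placeholder choices, not the settings used to build CulturaX.
```python
# Illustrative MinHash + LSH near-duplicate filtering (not the exact CulturaX pipeline).
from datasketch import MinHash, MinHashLSH

def doc_minhash(text: str, num_perm: int = 128) -> MinHash:
    m = MinHash(num_perm=num_perm)
    # character 5-gram shingles; the shingle size is a placeholder choice
    for shingle in {text[i:i + 5] for i in range(max(len(text) - 4, 1))}:
        m.update(shingle.encode("utf-8"))
    return m

lsh = MinHashLSH(threshold=0.8, num_perm=128)  # 0.8 Jaccard threshold is a placeholder
kept = []
for doc_id, text in enumerate(["first document ...", "first document ...", "a different one"]):
    m = doc_minhash(text)
    if not lsh.query(m):              # no near-duplicate indexed yet -> keep this document
        lsh.insert(str(doc_id), m)
        kept.append(doc_id)
print(kept)  # the second (duplicate) document is dropped
```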
To obtain perplexity scores for data cleaning, we train a SentencePiece tokenizer and 5-gram Kneser-Ney language models as provided in the KenLM library [3] using the 20230501 dumps of Wikipedia. Our KenLM models are also released in HuggingFace: https://huggingface.co/uonlp/kenlm.
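As a rough sketch of how such models could be applied for perplexity-based filtering, the snippet below tokenizes a document with SentencePiece and scores it with a KenLM model. The file names and the idea of a per-language perplexity threshold are assumptions for illustration only; see https://huggingface.co/uonlp/kenlm for the actual released artifacts.
```python
# Hedged sketch: perplexity scoring with SentencePiece + KenLM (paths are placeholders).
import kenlm                      # Python bindings for the KenLM library
import sentencepiece as spm

sp = spm.SentencePieceProcessor(model_file="en.sp.model")  # hypothetical tokenizer file
lm = kenlm.Model("en.arpa.bin")                            # hypothetical 5-gram model file

def doc_perplexity(text: str) -> float:
    tokens = " ".join(sp.encode(text, out_type=str))       # space-joined subword pieces
    return lm.perplexity(tokens)

# Documents whose perplexity exceeds a (language-specific) threshold would be filtered out.
print(doc_perplexity("The quick brown fox jumps over the lazy dog."))
```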
Details for the dataset can be found in our technical paper: [https://arxiv.org/abs/2309.09400](https://arxiv.org/abs/2309.09400)
You can download the dataset using Hugging Face datasets:
*You may need to follow these instructions to set up authentication before downloading the dataset: [https://huggingface.co/docs/huggingface_hub/quick-start#login](https://huggingface.co/docs/huggingface_hub/quick-start#login)*
```python
from datasets import load_dataset
ds = load_dataset("uonlp/CulturaX",
"en",
use_auth_token=True)
```
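Because the full corpus is around 16TB, it may be more practical to stream a language config rather than download it entirely. The sketch below assumes a `train` split and the same authentication setup as above.
```python
from datasets import load_dataset

# Stream one language config instead of downloading the full dump (assumes a "train" split).
ds = load_dataset("uonlp/CulturaX", "en", split="train", streaming=True, use_auth_token=True)
print(next(iter(ds)))  # one record with "text", "timestamp", "url", "source" fields
```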
### Languages
The supported languages and statistics for our dataset can be found below:
*(Note that the language code `als` and `eml` refer to `gsw` and `x-eml` in the OSCAR-2301 dataset.)*
| | Code | Language | # Documents | # Tokens | # Tokens (%) |
|----:|:-------|:-------------------------|:----------------|:--------------------|:------|
| 0 | en | English | 3,241,065,682 | 2,846,970,578,793 | 45.13 |
| 1 | ru | Russian | 799,310,908 | 737,201,800,363 | 11.69 |
| 2 | es | Spanish | 450,937,645 | 373,845,662,394 | 5.93 |
| 3 | de | German | 420,017,484 | 357,030,348,021 | 5.66 |
| 4 | fr | French | 363,754,348 | 319,332,674,695 | 5.06 |
| 5 | zh | Chinese | 218,624,604 | 227,055,380,882 | 3.60 |
| 6 | it | Italian | 211,309,922 | 165,446,410,843 | 2.62 |
| 7 | pt | Portuguese | 190,289,658 | 136,941,763,923 | 2.17 |
| 8 | pl | Polish | 142,167,217 | 117,269,087,143 | 1.86 |
| 9 | ja | Japanese | 111,188,475 | 107,873,841,351 | 1.71 |
| 10 | nl | Dutch | 117,392,666 | 80,032,209,900 | 1.27 |
| 11 | ar | Arabic | 74,027,952 | 69,354,335,076 | 1.10 |
| 12 | tr | Turkish | 94,207,460 | 64,292,787,164 | 1.02 |
| 13 | cs | Czech | 65,350,564 | 56,910,486,745 | 0.90 |
| 14 | vi | Vietnamese | 57,606,341 | 55,380,123,774 | 0.88 |
| 15 | fa | Persian | 59,531,144 | 45,947,657,495 | 0.73 |
| 16 | hu | Hungarian | 44,132,152 | 43,417,981,714 | 0.69 |
| 17 | el | Greek | 51,430,226 | 43,147,590,757 | 0.68 |
| 18 | ro | Romanian | 40,325,424 | 39,647,954,768 | 0.63 |
| 19 | sv | Swedish | 49,709,189 | 38,486,181,494 | 0.61 |
| 20 | uk | Ukrainian | 44,740,545 | 38,226,128,686 | 0.61 |
| 21 | fi | Finnish | 30,467,667 | 28,925,009,180 | 0.46 |
| 22 | ko | Korean | 20,557,310 | 24,765,448,392 | 0.39 |
| 23 | da | Danish | 25,429,808 | 22,921,651,314 | 0.36 |
| 24 | bg | Bulgarian | 24,131,819 | 22,917,954,776 | 0.36 |
| 25 | no | Norwegian | 18,907,310 | 18,426,628,868 | 0.29 |
| 26 | hi | Hindi | 19,665,355 | 16,791,362,871 | 0.27 |
| 27 | sk | Slovak | 18,582,517 | 16,442,669,076 | 0.26 |
| 28 | th | Thai | 20,960,550 | 15,717,374,014 | 0.25 |
| 29 | lt | Lithuanian | 13,339,785 | 14,247,110,836 | 0.23 |
| 30 | ca | Catalan | 15,531,777 | 12,530,288,006 | 0.20 |
| 31 | id | Indonesian | 23,251,368 | 12,062,966,061 | 0.19 |
| 32 | bn | Bangla | 12,436,596 | 9,572,929,804 | 0.15 |
| 33 | et | Estonian | 8,004,753 | 8,805,656,165 | 0.14 |
| 34 | sl | Slovenian | 7,335,378 | 8,007,587,522 | 0.13 |
| 35 | lv | Latvian | 7,136,587 | 7,845,180,319 | 0.12 |
| 36 | he | Hebrew | 4,653,979 | 4,937,152,096 | 0.08 |
| 37 | sr | Serbian | 4,053,166 | 4,619,482,725 | 0.07 |
| 38 | ta | Tamil | 4,728,460 | 4,378,078,610 | 0.07 |
| 39 | sq | Albanian | 5,205,579 | 3,648,893,215 | 0.06 |
| 40 | az | Azerbaijani | 5,084,505 | 3,513,351,967 | 0.06 |
| 41 | kk | Kazakh | 2,733,982 | 2,802,485,195 | 0.04 |
| 42 | ur | Urdu | 2,757,279 | 2,703,052,627 | 0.04 |
| 43 | ka | Georgian | 3,120,321 | 2,617,625,564 | 0.04 |
| 44 | hy | Armenian | 2,964,488 | 2,395,179,284 | 0.04 |
| 45 | is | Icelandic | 2,373,560 | 2,350,592,857 | 0.04 |
| 46 | ml | Malayalam | 2,693,052 | 2,100,556,809 | 0.03 |
| 47 | ne | Nepali | 3,124,040 | 2,061,601,961 | 0.03 |
| 48 | mk | Macedonian | 2,762,807 | 2,003,302,006 | 0.03 |
| 49 | mr | Marathi | 2,266,588 | 1,955,227,796 | 0.03 |
| 50 | mn | Mongolian | 1,928,828 | 1,850,667,656 | 0.03 |
| 51 | be | Belarusian | 1,643,486 | 1,791,473,041 | 0.03 |
| 52 | te | Telugu | 1,822,865 | 1,566,972,146 | 0.02 |
| 53 | gl | Galician | 1,785,963 | 1,382,539,693 | 0.02 |
| 54 | eu | Basque | 1,598,822 | 1,262,066,759 | 0.02 |
| 55 | kn | Kannada | 1,352,142 | 1,242,285,201 | 0.02 |
| 56 | gu | Gujarati | 1,162,878 | 1,131,730,537 | 0.02 |
| 57 | af | Afrikaans | 826,519 | 1,119,009,767 | 0.02 |
| 58 | my | Burmese | 865,575 | 882,606,546 | 0.01 |
| 59 | si | Sinhala | 753,655 | 880,289,097 | 0.01 |
| 60 | eo | Esperanto | 460,088 | 803,948,528 | 0.01 |
| 61 | km | Khmer | 1,013,181 | 746,664,132 | 0.01 |
| 62 | pa | Punjabi | 646,987 | 727,546,145 | 0.01 |
| 63 | cy | Welsh | 549,955 | 576,743,162 | 0.01 |
| 64 | ky | Kyrgyz | 570,922 | 501,442,620 | 0.01 |
| 65 | ga | Irish | 304,251 | 376,947,935 | 0.01 |
| 66 | ps | Pashto | 376,914 | 363,007,770 | 0.01 |
| 67 | am | Amharic | 243,349 | 358,206,762 | 0.01 |
| 68 | ku | Kurdish | 295,314 | 302,990,910 | 0.00 |
| 69 | tl | Filipino | 348,453 | 242,086,456 | 0.00 |
| 70 | yi | Yiddish | 141,156 | 217,584,643 | 0.00 |
| 71 | lo | Lao | 217,842 | 168,256,876 | 0.00 |
| 72 | fy | Western Frisian | 223,268 | 167,193,111 | 0.00 |
| 73 | sd | Sindhi | 109,162 | 147,487,058 | 0.00 |
| 74 | mg | Malagasy | 115,910 | 142,685,412 | 0.00 |
| 75 | or | Odia | 153,461 | 100,323,213 | 0.00 |
| 76 | as | Assamese | 52,627 | 83,787,896 | 0.00 |
| 77 | ug | Uyghur | 47,035 | 77,677,306 | 0.00 |
| 78 | uz | Uzbek | 87,219 | 75,250,787 | 0.00 |
| 79 | la | Latin | 48,968 | 44,176,580 | 0.00 |
| 80 | hr | Croatian | 460,690 | 40,796,811 | 0.00 |
| 81 | sw | Swahili | 66,506 | 30,708,309 | 0.00 |
| 82 | ms | Malay | 238,151 | 19,375,976 | 0.00 |
| 83 | br | Breton | 43,765 | 13,987,037 | 0.00 |
| 84 | sa | Sanskrit | 16,290 | 13,561,367 | 0.00 |
| 85 | gd | Scottish Gaelic | 8,408 | 4,796,485 | 0.00 |
| 86 | su | Sundanese | 1,554 | 1,308,460 | 0.00 |
| 87 | jv | Javanese | 2,058 | 625,429 | 0.00 |
| 88 | tg | Tajik | 483,835 | - | - |
| 89 | ceb | Cebuano | 263,890 | - | - |
| 90 | tt | Tatar | 218,102 | - | - |
| 91 | ckb | Central Kurdish | 172,035 | - | - |
| 92 | lb | Luxembourgish | 165,891 | - | - |
| 93 | mt | Maltese | 151,320 | - | - |
| 94 | nn | Norwegian Nynorsk | 126,083 | - | - |
| 95 | qu | Quechua | 1,202 | 72,101 | 0.00 |
| 96 | ba | Bashkir | 71,957 | - | - |
| 97 | arz | Egyptian Arabic | 71,625 | - | - |
| 98 | dv | Divehi | 66,702 | - | - |
| 99 | bo | Tibetan | 54,185 | - | - |
| 100 | sh | Serbian (Latin) | 45,619 | - | - |
| 101 | yo | Yoruba | 192 | 42,943 | 0.00 |
| 102 | bs | Bosnian | 1,237 | 39,768 | 0.00 |
| 103 | azb | South Azerbaijani | 29,833 | - | - |
| 104 | ht | Haitian Creole | 12 | 26,183 | 0.00 |
| 105 | war | Waray | 23,687 | - | - |
| 106 | cv | Chuvash | 22,570 | - | - |
| 107 | sah | Sakha | 22,141 | - | - |
| 108 | li | Limburgish | 206 | 18,532 | 0.00 |
| 109 | ce | Chechen | 17,322 | - | - |
| 110 | pnb | Western Panjabi | 15,625 | - | - |
| 111 | nds | Low German | 15,139 | - | - |
| 112 | tk | Turkmen | 14,393 | - | - |
| 113 | gn | Guarani | 103 | 12,708 | 0.00 |
| 114 | oc | Occitan | 10,556 | - | - |
| 115 | xmf | Mingrelian | 9,706 | - | - |
| 116 | ast | Asturian | 9,002 | - | - |
| 117 | os | Ossetic | 8,596 | - | - |
| 118 | mhr | Eastern Mari | 7,883 | - | - |
| 119 | pms | Piedmontese | 7,566 | - | - |
| 120 | als[*] | Swiss German | 6,936 | - | - |
| 121 | vo | Volapük | 6,621 | - | - |
| 122 | so | Somali | 39 | 6,053 | 0.00 |
| 123 | bpy | Bishnupriya | 5,087 | - | - |
| 124 | new | Newari | 4,344 | - | - |
| 125 | hsb | Upper Sorbian | 4,244 | - | - |
| 126 | lmo | Lombard | 3,530 | - | - |
| 127 | an | Aragonese | 2,746 | - | - |
| 128 | ilo | Iloko | 2,328 | - | - |
| 129 | mzn | Mazanderani | 1,914 | - | - |
| 130 | lez | Lezghian | 1,806 | - | - |
| 131 | rm | Romansh | 30 | 1,769 | 0.00 |
| 132 | krc | Karachay-Balkar | 1,745 | - | - |
| 133 | min | Minangkabau | 1,429 | - | - |
| 134 | kv | Komi | 1,396 | - | - |
| 135 | wa | Walloon | 1,383 | - | - |
| 136 | jbo | Lojban | 1,349 | - | - |
| 137 | io | Ido | 1,144 | - | - |
| 138 | mrj | Western Mari | 1,056 | - | - |
| 139 | gom | Goan Konkani | 721 | - | - |
| 140 | ia | Interlingua | 613 | - | - |
| 141 | av | Avaric | 438 | - | - |
| 142 | bh | Bihari languages | 265 | - | - |
| 143 | wuu | Wu Chinese | 222 | - | - |
| 144 | nah | Nahuatl languages | 131 | - | - |
| 145 | vec | Venetian | 113 | - | - |
| 146 | bxr | Russia Buriat | 100 | - | - |
| 147 | kw | Cornish | 94 | - | - |
| 148 | mai | Maithili | 93 | - | - |
| 149 | eml[*] | Emiliano-Romagnol | 91 | - | - |
| 150 | dsb | Lower Sorbian | 59 | - | - |
| 151 | xal | Kalmyk | 51 | - | - |
| 152 | lrc | Northern Luri | 43 | - | - |
| 153 | nap | Neapolitan | 31 | - | - |
| 154 | tyv | Tuvinian | 23 | - | - |
| 155 | scn | Sicilian | 21 | - | - |
| 156 | frr | Northern Frisian | 11 | - | - |
| 157 | mwl | Mirandese | 9 | - | - |
| 158 | myv | Erzya | 4 | - | - |
| 159 | ie | Interlingue | 4 | - | - |
| 160 | pam | Pampanga | 4 | - | - |
| 161 | bar | Bavarian | 3 | - | - |
| 162 | yue | Yue Chinese | 3 | - | - |
| 163 | cbk | Chavacano | 2 | - | - |
| 164 | bcl | Central Bikol | 1 | - | - |
| 165 | vls | West Flemish | 1 | - | - |
| 166 | rue | Rusyn | 1 | - | - |
### Dataset Structure
```json
{
"text": ...,
"timestamp": ...,
"url": ...,
"source": "mc4" | "OSCAR-xxxx",
}
```
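For example, the `source` field can be used to keep only documents originating from one of the upstream corpora. The filter below is a small, hypothetical illustration on a streamed split.
```python
from datasets import load_dataset

ds = load_dataset("uonlp/CulturaX", "vi", split="train", streaming=True, use_auth_token=True)
mc4_only = (ex for ex in ds if ex["source"] == "mc4")  # keep only mC4-derived documents
print(next(mc4_only)["url"])
```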
## Considerations for Using the Data
As CulturaX is the cleaned version of the mC4 and OSCAR datasets, which were both extracted from CommonCrawl, the data might still contain personal and sensitive information.
This must be considered before using this dataset for any purpose, such as training deep learning models.
## License Information
The license terms for CulturaX strictly follow those of `mC4` and `OSCAR`. Please refer to both licenses below when using this dataset.
- [mC4 license](https://huggingface.co/datasets/allenai/c4#license)
- [OSCAR license](https://huggingface.co/datasets/oscar-corpus/OSCAR-2301#licensing-information)
## Citation
To cite CulturaX, please use:
```
@misc{nguyen2023culturax,
title={CulturaX: A Cleaned, Enormous, and Multilingual Dataset for Large Language Models in 167 Languages},
author={Thuat Nguyen and Chien Van Nguyen and Viet Dac Lai and Hieu Man and Nghia Trung Ngo and Franck Dernoncourt and Ryan A. Rossi and Thien Huu Nguyen},
year={2023},
eprint={2309.09400},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
## Reference
[1] Linting Xue, Noah Constant, Adam Roberts, Mihir Kale, Rami Al-Rfou, Aditya Siddhant, Aditya Barua, and Colin Raffel. 2021. mT5: A massively multilingual pre-trained text-to-text transformer. In NAACL 2021. https://huggingface.co/datasets/mc4
[2] Pedro Javier Ortiz Suárez, Benoît Sagot, and Laurent Romary. 2019. Asynchronous pipelines for processing huge corpora on medium to low resource infrastructures. In Proceedings of the Workshop on Challenges in the Management of Large Corpora (CMLC-7) 2019. https://oscar-project.org/
[3] Kenneth Heafield. 2011. KenLM: Faster and smaller language model queries. In Proceedings of the Sixth Workshop on Statistical Machine Translation, 2011.
| Madjakul/l-halversting | [
"task_categories:text-generation",
"task_categories:fill-mask",
"task_ids:language-modeling",
"task_ids:masked-language-modeling",
"annotations_creators:no-annotation",
"language_creators:found",
"multilinguality:multilingual",
"source_datasets:original",
"language:af",
"language:als",
"language:am",
"language:an",
"language:ar",
"language:arz",
"language:as",
"language:ast",
"language:av",
"language:az",
"language:azb",
"language:ba",
"language:bar",
"language:bcl",
"language:be",
"language:bg",
"language:bh",
"language:bn",
"language:bo",
"language:bpy",
"language:br",
"language:bs",
"language:bxr",
"language:ca",
"language:cbk",
"language:ce",
"language:ceb",
"language:ckb",
"language:cs",
"language:cv",
"language:cy",
"language:da",
"language:de",
"language:dsb",
"language:dv",
"language:el",
"language:eml",
"language:en",
"language:eo",
"language:es",
"language:et",
"language:eu",
"language:fa",
"language:fi",
"language:fr",
"language:frr",
"language:fy",
"language:ga",
"language:gd",
"language:gl",
"language:gn",
"language:gom",
"language:gu",
"language:he",
"language:hi",
"language:hr",
"language:hsb",
"language:ht",
"language:hu",
"language:hy",
"language:ia",
"language:id",
"language:ie",
"language:ilo",
"language:io",
"language:is",
"language:it",
"language:ja",
"language:jbo",
"language:jv",
"language:ka",
"language:kk",
"language:km",
"language:kn",
"language:ko",
"language:krc",
"language:ku",
"language:kv",
"language:kw",
"language:ky",
"language:la",
"language:lb",
"language:lez",
"language:li",
"language:lmo",
"language:lo",
"language:lrc",
"language:lt",
"language:lv",
"language:mai",
"language:mg",
"language:mhr",
"language:min",
"language:mk",
"language:ml",
"language:mn",
"language:mr",
"language:mrj",
"language:ms",
"language:mt",
"language:mwl",
"language:my",
"language:myv",
"language:mzn",
"language:nah",
"language:nap",
"language:nds",
"language:ne",
"language:new",
"language:nl",
"language:nn",
"language:no",
"language:oc",
"language:or",
"language:os",
"language:pa",
"language:pam",
"language:pl",
"language:pms",
"language:pnb",
"language:ps",
"language:pt",
"language:qu",
"language:rm",
"language:ro",
"language:ru",
"language:rue",
"language:sa",
"language:sah",
"language:scn",
"language:sd",
"language:sh",
"language:si",
"language:sk",
"language:sl",
"language:so",
"language:sq",
"language:sr",
"language:su",
"language:sv",
"language:sw",
"language:ta",
"language:te",
"language:tg",
"language:th",
"language:tk",
"language:tl",
"language:tr",
"language:tt",
"language:tyv",
"language:ug",
"language:uk",
"language:ur",
"language:uz",
"language:vec",
"language:vi",
"language:vls",
"language:vo",
"language:wa",
"language:war",
"language:wuu",
"language:xal",
"language:xmf",
"language:yi",
"language:yo",
"language:yue",
"language:zh",
"arxiv:2309.09400",
"region:us"
] | 2024-02-15T21:36:53+00:00 | {"annotations_creators": ["no-annotation"], "language_creators": ["found"], "language": ["af", "als", "am", "an", "ar", "arz", "as", "ast", "av", "az", "azb", "ba", "bar", "bcl", "be", "bg", "bh", "bn", "bo", "bpy", "br", "bs", "bxr", "ca", "cbk", "ce", "ceb", "ckb", "cs", "cv", "cy", "da", "de", "dsb", "dv", "el", "eml", "en", "eo", "es", "et", "eu", "fa", "fi", "fr", "frr", "fy", "ga", "gd", "gl", "gn", "gom", "gu", "he", "hi", "hr", "hsb", "ht", "hu", "hy", "ia", "id", "ie", "ilo", "io", "is", "it", "ja", "jbo", "jv", "ka", "kk", "km", "kn", "ko", "krc", "ku", "kv", "kw", "ky", "la", "lb", "lez", "li", "lmo", "lo", "lrc", "lt", "lv", "mai", "mg", "mhr", "min", "mk", "ml", "mn", "mr", "mrj", "ms", "mt", "mwl", "my", "myv", "mzn", "nah", "nap", "nds", "ne", "new", "nl", "nn", "no", "oc", "or", "os", "pa", "pam", "pl", "pms", "pnb", "ps", "pt", "qu", "rm", "ro", "ru", "rue", "sa", "sah", "scn", "sd", "sh", "si", "sk", "sl", "so", "sq", "sr", "su", "sv", "sw", "ta", "te", "tg", "th", "tk", "tl", "tr", "tt", "tyv", "ug", "uk", "ur", "uz", "vec", "vi", "vls", "vo", "wa", "war", "wuu", "xal", "xmf", "yi", "yo", "yue", "zh"], "multilinguality": ["multilingual"], "source_datasets": ["original"], "task_categories": ["text-generation", "fill-mask"], "task_ids": ["language-modeling", "masked-language-modeling"], "pretty_name": "LHALversting", "configs": [{"config_name": "de", "data_files": "de/*.tar.gz"}, {"config_name": "en", "data_files": "en/*.tar.gz"}, {"config_name": "fr", "data_files": "fr/*.tar.gz"}], "extra_gated_prompt": "By completing the form below, you acknowledge that the provided data is offered as is. Although we anticipate no problems, you accept full responsibility for any repercussions resulting from the use of this data. Furthermore, you agree that the data must not be utilized for malicious or harmful purposes towards humanity.", "extra_gated_fields": {"Name": "text", "Email": "text", "Affiliation": "text", "Country": "text", "Usecase": "text", "I have explicitly check with my jurisdiction and I confirm that downloading CulturaX is legal in the country/region where I am located right now, and for the use case that I have described above": "checkbox", "You agree to not attempt to determine the identity of individuals in this dataset": "checkbox"}} | 2024-02-16T19:53:17+00:00 |
88349b95507d871e30c9a5d2b862c34682315395 | trevorweston/monet | [
"region:us"
] | 2024-02-15T21:42:02+00:00 | {} | 2024-02-15T21:44:53+00:00 |