Commit 69eed68 · verified · Parent: acd7cc6
SaylorTwift (HF Staff) committed

Add 'college_medicine' config data files
README.md CHANGED
@@ -336,6 +336,8 @@ dataset_info:
   features:
   - name: question
     dtype: string
+  - name: subject
+    dtype: string
   - name: choices
     sequence: string
   - name: answer
@@ -348,19 +350,19 @@ dataset_info:
           '3': D
   splits:
   - name: auxiliary_train
-    num_bytes: 160601377
+    num_bytes: 161000625
     num_examples: 99842
   - name: test
-    num_bytes: 82397
+    num_bytes: 85845
     num_examples: 173
   - name: validation
-    num_bytes: 7909
+    num_bytes: 8337
     num_examples: 22
   - name: dev
-    num_bytes: 1670
+    num_bytes: 1758
     num_examples: 5
-  download_size: 166184960
-  dataset_size: 160693353
+  download_size: 47218201
+  dataset_size: 161096565
 - config_name: college_physics
   features:
   - name: question
@@ -1825,6 +1827,16 @@ configs:
     path: college_mathematics/validation-*
   - split: dev
     path: college_mathematics/dev-*
+- config_name: college_medicine
+  data_files:
+  - split: auxiliary_train
+    path: college_medicine/auxiliary_train-*
+  - split: test
+    path: college_medicine/test-*
+  - split: validation
+    path: college_medicine/validation-*
+  - split: dev
+    path: college_medicine/dev-*
 ---
 
 # Dataset Card for MMLU
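The YAML block above wires the new config into the Hub's data-file resolution: once this commit lands, `datasets` can load `college_medicine` straight from the parquet files, with each split resolving to the paths listed under `data_files`. A minimal sketch, assuming the repository id is `cais/mmlu` (the id is not shown in this diff):

```python
from datasets import load_dataset

# "college_medicine" is the config name declared in the YAML above.
ds = load_dataset("cais/mmlu", "college_medicine")

print(ds)                          # splits: auxiliary_train, test, validation, dev
print(ds["test"][0]["question"])   # one of the 173 test questions
print(ds["test"][0]["choices"])    # four options mapped to labels A-D
```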
college_medicine/auxiliary_train-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c2782fc860f57d9345a9233ab04f494b0af5ae85b893a27853f7014b14a3bd07
+size 47163955
college_medicine/dev-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f66833a9fd0998de367c581b4f93c1ea2ab17242f7864a929a78260f39d31aef
+size 4114
college_medicine/test-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a9bbba81068109cc5caf6235b16c494f2f1e58cfdc0e88a1767514c4da7c0435
+size 41803
college_medicine/validation-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4c086dcf941908b34711d65032646c349fa15c83681cd8ec99a04271a334a784
+size 8329
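Each of the four files above is stored through Git LFS, so the committed blob is a three-line pointer (spec version, sha256 oid, byte size) rather than the parquet data itself. A minimal sketch of checking a downloaded file against its pointer; the local path is hypothetical:

```python
import hashlib
from pathlib import Path

# Hypothetical local copy of the test-split parquet file.
path = Path("college_medicine/test-00000-of-00001.parquet")

# The LFS pointer records the blob's byte size and sha256 digest.
assert path.stat().st_size == 41803, "size mismatch with LFS pointer"
digest = hashlib.sha256(path.read_bytes()).hexdigest()
assert digest == "a9bbba81068109cc5caf6235b16c494f2f1e58cfdc0e88a1767514c4da7c0435"
print("parquet file matches its LFS pointer")
```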
dataset_infos.json CHANGED
@@ -637,39 +637,34 @@
       "features": {
         "question": {
           "dtype": "string",
-          "id": null,
+          "_type": "Value"
+        },
+        "subject": {
+          "dtype": "string",
           "_type": "Value"
         },
         "choices": {
           "feature": {
             "dtype": "string",
-            "id": null,
             "_type": "Value"
           },
-          "length": -1,
-          "id": null,
           "_type": "Sequence"
         },
         "answer": {
-          "num_classes": 4,
           "names": [
             "A",
             "B",
             "C",
             "D"
           ],
-          "id": null,
           "_type": "ClassLabel"
         }
       },
-      "post_processed": null,
-      "supervised_keys": null,
-      "task_templates": null,
-      "builder_name": "mmlu",
+      "builder_name": "parquet",
+      "dataset_name": "mmlu",
       "config_name": "college_medicine",
       "version": {
         "version_str": "1.0.0",
-        "description": null,
         "major": 1,
         "minor": 0,
         "patch": 0
@@ -677,39 +672,32 @@
       "splits": {
         "auxiliary_train": {
           "name": "auxiliary_train",
-          "num_bytes": 160601257,
+          "num_bytes": 161000625,
           "num_examples": 99842,
-          "dataset_name": "mmlu"
+          "dataset_name": null
         },
         "test": {
           "name": "test",
-          "num_bytes": 82385,
+          "num_bytes": 85845,
           "num_examples": 173,
-          "dataset_name": "mmlu"
+          "dataset_name": null
         },
         "validation": {
           "name": "validation",
-          "num_bytes": 7897,
+          "num_bytes": 8337,
           "num_examples": 22,
-          "dataset_name": "mmlu"
+          "dataset_name": null
         },
         "dev": {
           "name": "dev",
-          "num_bytes": 1658,
+          "num_bytes": 1758,
           "num_examples": 5,
-          "dataset_name": "mmlu"
-        }
-      },
-      "download_checksums": {
-        "data.tar": {
-          "num_bytes": 166184960,
-          "checksum": "bec563ba4bac1d6aaf04141cd7d1605d7a5ca833e38f994051e818489592989b"
+          "dataset_name": null
         }
       },
-      "download_size": 166184960,
-      "post_processing_size": null,
-      "dataset_size": 160693197,
-      "size_in_bytes": 326878157
+      "download_size": 47218201,
+      "dataset_size": 161096565,
+      "size_in_bytes": 208314766
     },
     "college_physics": {
       "description": "This is a massive multitask test consisting of multiple-choice questions from various branches of knowledge, covering 57 tasks including elementary mathematics, US history, computer science, law, and more.\n",