Modalities: Text
Formats: parquet
Languages: English
Libraries: Datasets, pandas
SaylorTwift (HF Staff) committed
Commit 5c4a97b · verified · 1 Parent(s): 69eed68

Add 'college_physics' config data files
README.md CHANGED
@@ -367,6 +367,8 @@ dataset_info:
   features:
   - name: question
     dtype: string
+  - name: subject
+    dtype: string
   - name: choices
     sequence: string
   - name: answer
@@ -379,19 +381,19 @@ dataset_info:
           '3': D
   splits:
   - name: auxiliary_train
-    num_bytes: 160601377
+    num_bytes: 161000625
     num_examples: 99842
   - name: test
-    num_bytes: 30181
+    num_bytes: 32107
     num_examples: 102
   - name: validation
-    num_bytes: 3490
+    num_bytes: 3687
     num_examples: 11
   - name: dev
-    num_bytes: 1412
+    num_bytes: 1495
     num_examples: 5
-  download_size: 166184960
-  dataset_size: 160636460
+  download_size: 47190901
+  dataset_size: 161037914
 - config_name: computer_security
   features:
   - name: question
@@ -1837,6 +1839,16 @@ configs:
     path: college_medicine/validation-*
   - split: dev
     path: college_medicine/dev-*
+- config_name: college_physics
+  data_files:
+  - split: auxiliary_train
+    path: college_physics/auxiliary_train-*
+  - split: test
+    path: college_physics/test-*
+  - split: validation
+    path: college_physics/validation-*
+  - split: dev
+    path: college_physics/dev-*
 ---
 
 # Dataset Card for MMLU
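
The ten added configs lines are what make the new subset addressable by name: with this mapping in the README front matter, the college_physics config resolves straight to the Parquet shards committed below, with no loading script needed. A minimal loading sketch with the datasets library (the full repo id is not shown on this page; "cais/mmlu" is assumed here):

from datasets import load_dataset

# The data_files mapping added above routes each split to its
# college_physics/*.parquet shard.
# NOTE: "cais/mmlu" is an assumed repo id -- substitute the actual repo.
mmlu_physics = load_dataset("cais/mmlu", "college_physics")

print(mmlu_physics)                   # splits: auxiliary_train, test, validation, dev
print(mmlu_physics["test"].num_rows)  # 102, per the split metadata above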
college_physics/auxiliary_train-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c2782fc860f57d9345a9233ab04f494b0af5ae85b893a27853f7014b14a3bd07
+size 47163955
college_physics/dev-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:694320e1d4db89309520964f9fbcb53e284b3f098cbd6acdebd4ce1022d03b55
+size 3760
college_physics/test-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:11d068a92d8690700e8b5db5efa4c3c1215e3b27870ec83ff2675e1a58a30f0a
+size 17653
college_physics/validation-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3accba591a12e012a5d2aad7346559d48039e65c591c14a815e3cfc6b85789cb
+size 5533
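
Each of the four files committed above is a three-line Git LFS pointer, not the Parquet payload itself: oid records the SHA-256 of the real file and size its length in bytes. The four sizes sum to 47163955 + 3760 + 17653 + 5533 = 47190901, matching the new download_size in the metadata. A sketch of verifying a downloaded shard against its pointer (file paths are illustrative):

import hashlib

def verify_lfs_pointer(pointer_path, payload_path):
    """Check a downloaded file against its Git LFS pointer (oid + size)."""
    fields = dict(
        line.split(" ", 1)
        for line in open(pointer_path, encoding="utf-8").read().splitlines()
        if " " in line
    )
    expected_oid = fields["oid"].removeprefix("sha256:")  # Python 3.9+
    expected_size = int(fields["size"])

    digest = hashlib.sha256()
    actual_size = 0
    with open(payload_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            digest.update(chunk)
            actual_size += len(chunk)
    return digest.hexdigest() == expected_oid and actual_size == expected_size

# e.g. verify_lfs_pointer("test.pointer", "test-00000-of-00001.parquet")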
dataset_infos.json CHANGED
@@ -707,39 +707,34 @@
     "features": {
       "question": {
         "dtype": "string",
-        "id": null,
+        "_type": "Value"
+      },
+      "subject": {
+        "dtype": "string",
         "_type": "Value"
       },
       "choices": {
         "feature": {
           "dtype": "string",
-          "id": null,
           "_type": "Value"
         },
-        "length": -1,
-        "id": null,
         "_type": "Sequence"
       },
       "answer": {
-        "num_classes": 4,
         "names": [
           "A",
           "B",
           "C",
           "D"
         ],
-        "id": null,
         "_type": "ClassLabel"
       }
     },
-    "post_processed": null,
-    "supervised_keys": null,
-    "task_templates": null,
-    "builder_name": "mmlu",
+    "builder_name": "parquet",
+    "dataset_name": "mmlu",
     "config_name": "college_physics",
     "version": {
       "version_str": "1.0.0",
-      "description": null,
       "major": 1,
       "minor": 0,
       "patch": 0
@@ -747,39 +742,32 @@
     "splits": {
       "auxiliary_train": {
         "name": "auxiliary_train",
-        "num_bytes": 160601257,
+        "num_bytes": 161000625,
         "num_examples": 99842,
-        "dataset_name": "mmlu"
+        "dataset_name": null
       },
       "test": {
         "name": "test",
-        "num_bytes": 30169,
+        "num_bytes": 32107,
         "num_examples": 102,
-        "dataset_name": "mmlu"
+        "dataset_name": null
       },
       "validation": {
         "name": "validation",
-        "num_bytes": 3478,
+        "num_bytes": 3687,
         "num_examples": 11,
-        "dataset_name": "mmlu"
+        "dataset_name": null
      },
       "dev": {
         "name": "dev",
-        "num_bytes": 1400,
+        "num_bytes": 1495,
         "num_examples": 5,
-        "dataset_name": "mmlu"
-      }
-    },
-    "download_checksums": {
-      "data.tar": {
-        "num_bytes": 166184960,
-        "checksum": "bec563ba4bac1d6aaf04141cd7d1605d7a5ca833e38f994051e818489592989b"
+        "dataset_name": null
       }
     },
-    "download_size": 166184960,
-    "post_processing_size": null,
-    "dataset_size": 160636304,
-    "size_in_bytes": 326821264
+    "download_size": 47190901,
+    "dataset_size": 161037914,
+    "size_in_bytes": 208228815
   },
   "computer_security": {
     "description": "This is a massive multitask test consisting of multiple-choice questions from various branches of knowledge, covering 57 tasks including elementary mathematics, US history, computer science, law, and more.\n",