Modalities: Text · Formats: parquet · Languages: English · Libraries: Datasets, pandas
Commit 0a7edb8 · verified · 1 parent: 9a5d49e
SaylorTwift (HF Staff) committed

Add 'college_chemistry' config data files
README.md CHANGED

@@ -243,6 +243,8 @@ dataset_info:
   features:
   - name: question
     dtype: string
+  - name: subject
+    dtype: string
   - name: choices
     sequence: string
   - name: answer
@@ -255,19 +257,19 @@ dataset_info:
           '3': D
   splits:
   - name: auxiliary_train
-    num_bytes: 160601377
+    num_bytes: 161000625
     num_examples: 99842
   - name: test
-    num_bytes: 24708
+    num_bytes: 26796
     num_examples: 100
   - name: validation
-    num_bytes: 2328
+    num_bytes: 2484
     num_examples: 8
   - name: dev
-    num_bytes: 1331
+    num_bytes: 1424
     num_examples: 5
-  download_size: 166184960
-  dataset_size: 160629744
+  download_size: 47188958
+  dataset_size: 161031329
 - config_name: college_computer_science
   features:
   - name: question
@@ -1789,6 +1791,16 @@ configs:
     path: college_biology/validation-*
   - split: dev
     path: college_biology/dev-*
+- config_name: college_chemistry
+  data_files:
+  - split: auxiliary_train
+    path: college_chemistry/auxiliary_train-*
+  - split: test
+    path: college_chemistry/test-*
+  - split: validation
+    path: college_chemistry/validation-*
+  - split: dev
+    path: college_chemistry/dev-*
 ---
 
 # Dataset Card for MMLU
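Once this commit lands, the new config resolves straight from the parquet files declared under `configs:` above. Below is a minimal loading sketch with the `datasets` library; the repo id `cais/mmlu` is an assumption, since the diff itself never names the repository.

```python
# Minimal sketch: load the newly declared "college_chemistry" config.
# The repo id "cais/mmlu" is an assumption; the diff only shows the
# config name and its per-split parquet path globs.
from datasets import load_dataset

ds = load_dataset("cais/mmlu", "college_chemistry")
print(ds)             # DatasetDict with auxiliary_train / test / validation / dev
print(ds["test"][0])  # {'question': ..., 'subject': ..., 'choices': [...], 'answer': ...}
```

The split names map one-to-one onto the `path: college_chemistry/<split>-*` globs declared in the YAML block above.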
college_chemistry/auxiliary_train-00000-of-00001.parquet ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c2782fc860f57d9345a9233ab04f494b0af5ae85b893a27853f7014b14a3bd07
+size 47163955

college_chemistry/dev-00000-of-00001.parquet ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a8c997d19239dfe84cc4d74acd82d550f1cb33ea3915f6afa997b2f71f7c5094
+size 3484

college_chemistry/test-00000-of-00001.parquet ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e1d9477056f186d4942cb96b56b3bf084ab9f90a4204bdf977d78370d10ed7c8
+size 17294

college_chemistry/validation-00000-of-00001.parquet ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9c9d41e406c94bce386f9984c8ac1d6d96b8b384dc2ff602bb29082ad0d1209b
+size 4225
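Each added parquet file is stored through Git LFS, so the repository itself carries only a three-line pointer (spec version, sha256 oid, byte size) while the binary lives in LFS storage. A small, self-contained sketch of how those pointer fields decode, using the dev-split pointer above:

```python
# Minimal sketch: parse a Git LFS pointer file like the four added above.
# Each pointer is three "key value" lines; the sha256 oid addresses the
# actual parquet bytes in LFS storage.
def parse_lfs_pointer(text: str) -> dict:
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "oid": fields["oid"].removeprefix("sha256:"),
        "size_bytes": int(fields["size"]),
    }

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:a8c997d19239dfe84cc4d74acd82d550f1cb33ea3915f6afa997b2f71f7c5094
size 3484"""

print(parse_lfs_pointer(pointer))  # dev split: a 3,484-byte parquet file
```

As a sanity check, the four pointer sizes sum exactly to the new `download_size`: 47163955 + 3484 + 17294 + 4225 = 47188958 bytes. The larger `num_bytes` values in the split metadata are uncompressed in-memory sizes, which is why they exceed the compressed parquet files on disk.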
dataset_infos.json CHANGED

@@ -427,39 +427,34 @@
     "features": {
       "question": {
         "dtype": "string",
-        "id": null,
+        "_type": "Value"
+      },
+      "subject": {
+        "dtype": "string",
         "_type": "Value"
       },
       "choices": {
         "feature": {
           "dtype": "string",
-          "id": null,
           "_type": "Value"
         },
-        "length": -1,
-        "id": null,
         "_type": "Sequence"
       },
       "answer": {
-        "num_classes": 4,
         "names": [
           "A",
           "B",
           "C",
           "D"
         ],
-        "id": null,
         "_type": "ClassLabel"
       }
     },
-    "post_processed": null,
-    "supervised_keys": null,
-    "task_templates": null,
-    "builder_name": "mmlu",
+    "builder_name": "parquet",
+    "dataset_name": "mmlu",
     "config_name": "college_chemistry",
     "version": {
       "version_str": "1.0.0",
-      "description": null,
       "major": 1,
       "minor": 0,
       "patch": 0
@@ -467,39 +462,32 @@
     "splits": {
       "auxiliary_train": {
         "name": "auxiliary_train",
-        "num_bytes": 160601257,
+        "num_bytes": 161000625,
         "num_examples": 99842,
-        "dataset_name": "mmlu"
+        "dataset_name": null
       },
       "test": {
         "name": "test",
-        "num_bytes": 24696,
+        "num_bytes": 26796,
         "num_examples": 100,
-        "dataset_name": "mmlu"
+        "dataset_name": null
      },
       "validation": {
         "name": "validation",
-        "num_bytes": 2316,
+        "num_bytes": 2484,
         "num_examples": 8,
-        "dataset_name": "mmlu"
+        "dataset_name": null
       },
       "dev": {
         "name": "dev",
-        "num_bytes": 1319,
+        "num_bytes": 1424,
         "num_examples": 5,
-        "dataset_name": "mmlu"
-      }
-    },
-    "download_checksums": {
-      "data.tar": {
-        "num_bytes": 166184960,
-        "checksum": "bec563ba4bac1d6aaf04141cd7d1605d7a5ca833e38f994051e818489592989b"
+        "dataset_name": null
       }
     },
-    "download_size": 166184960,
-    "post_processing_size": null,
-    "dataset_size": 160629588,
-    "size_in_bytes": 326814548
+    "download_size": 47188958,
+    "dataset_size": 161031329,
+    "size_in_bytes": 208220287
   },
   "college_computer_science": {
     "description": "This is a massive multitask test consisting of multiple-choice questions from various branches of knowledge, covering 57 tasks including elementary mathematics, US history, computer science, law, and more.\n",