Modalities: Text
Formats: parquet
Languages: English
Libraries: Datasets, pandas
SaylorTwift (HF Staff) committed (verified)
Commit 9a5d49e · 1 parent: 66efae1

Add 'college_biology' config data files
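For reference, once this commit lands the new subset can be loaded through the `datasets` library along these lines. This is a minimal sketch, not part of the commit; the repository id "cais/mmlu" is an assumption inferred from the dataset card title and should be replaced with this repo's actual path if it differs.

from datasets import load_dataset

# NOTE: "cais/mmlu" is an assumed repository id (not stated in this commit);
# substitute the actual repo path if it differs.
ds = load_dataset("cais/mmlu", "college_biology")

print(ds)              # splits: auxiliary_train, test, validation, dev
print(ds["test"][0])   # fields: question, subject, choices, answer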
README.md CHANGED
@@ -212,6 +212,8 @@ dataset_info:
   features:
   - name: question
     dtype: string
+  - name: subject
+    dtype: string
   - name: choices
     sequence: string
   - name: answer
@@ -224,19 +226,19 @@ dataset_info:
           '3': D
   splits:
   - name: auxiliary_train
-    num_bytes: 160601377
+    num_bytes: 161000625
     num_examples: 99842
   - name: test
-    num_bytes: 48797
+    num_bytes: 51521
     num_examples: 144
   - name: validation
-    num_bytes: 4819
+    num_bytes: 5111
     num_examples: 16
   - name: dev
-    num_bytes: 1532
+    num_bytes: 1615
     num_examples: 5
-  download_size: 166184960
-  dataset_size: 160656525
+  download_size: 47205152
+  dataset_size: 161058872
 - config_name: college_chemistry
   features:
   - name: question
@@ -1777,6 +1779,16 @@ configs:
     path: clinical_knowledge/validation-*
   - split: dev
     path: clinical_knowledge/dev-*
+- config_name: college_biology
+  data_files:
+  - split: auxiliary_train
+    path: college_biology/auxiliary_train-*
+  - split: test
+    path: college_biology/test-*
+  - split: validation
+    path: college_biology/validation-*
+  - split: dev
+    path: college_biology/dev-*
 ---
 
 # Dataset Card for MMLU
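The configs entry added above is what lets the Hub's parquet loader map each split of college_biology to its data files. As an illustration only (not part of this commit), the same mapping could be passed explicitly when loading from a local clone of the repository with LFS files pulled:

from datasets import load_dataset

# Explicit equivalent of the data_files mapping declared in README.md
# (illustrative sketch; paths assume a local clone of this repo).
data_files = {
    "auxiliary_train": "college_biology/auxiliary_train-*.parquet",
    "test": "college_biology/test-*.parquet",
    "validation": "college_biology/validation-*.parquet",
    "dev": "college_biology/dev-*.parquet",
}
ds = load_dataset("parquet", data_files=data_files)
print({split: ds[split].num_rows for split in ds})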
college_biology/auxiliary_train-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c2782fc860f57d9345a9233ab04f494b0af5ae85b893a27853f7014b14a3bd07
+size 47163955
college_biology/dev-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1552c2fbb6592c1edbf2212d094c3cba7b45d1d47175b9a4a401fc808da97c0e
+size 3728
college_biology/test-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7b8e52220d1bf042201cb71e25a47d9cdf818ee4541f67205872f58e4b7d3201
+size 31221
college_biology/validation-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bb05d5f5c78a0e7d7c4bd6d0044e39888df40b576b5a9e615bc37904b29bae91
+size 6248
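The four files above are Git LFS pointer files: the parquet shards themselves live in LFS storage, and each pointer records only the shard's sha256 and byte size. A quick local sanity check against the pointer for the test shard might look like this (an illustrative sketch, run from a clone with LFS content pulled; pandas is listed as a supported library for this dataset):

import hashlib
import pandas as pd

path = "college_biology/test-00000-of-00001.parquet"

# Hash the downloaded shard and compare with the oid recorded in its LFS pointer.
with open(path, "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()
print(digest)  # expected: 7b8e52220d1bf042201cb71e25a47d9cdf818ee4541f67205872f58e4b7d3201

# Inspect the contents with pandas.
df = pd.read_parquet(path)
print(len(df), list(df.columns))  # 144 rows; question, subject, choices, answer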
dataset_infos.json CHANGED
@@ -357,39 +357,34 @@
     "features": {
       "question": {
         "dtype": "string",
-        "id": null,
+        "_type": "Value"
+      },
+      "subject": {
+        "dtype": "string",
         "_type": "Value"
       },
       "choices": {
         "feature": {
           "dtype": "string",
-          "id": null,
           "_type": "Value"
         },
-        "length": -1,
-        "id": null,
         "_type": "Sequence"
       },
       "answer": {
-        "num_classes": 4,
         "names": [
           "A",
           "B",
           "C",
           "D"
         ],
-        "id": null,
         "_type": "ClassLabel"
       }
     },
-    "post_processed": null,
-    "supervised_keys": null,
-    "task_templates": null,
-    "builder_name": "mmlu",
+    "builder_name": "parquet",
+    "dataset_name": "mmlu",
     "config_name": "college_biology",
     "version": {
       "version_str": "1.0.0",
-      "description": null,
       "major": 1,
       "minor": 0,
       "patch": 0
@@ -397,39 +392,32 @@
     "splits": {
       "auxiliary_train": {
         "name": "auxiliary_train",
-        "num_bytes": 160601257,
+        "num_bytes": 161000625,
         "num_examples": 99842,
-        "dataset_name": "mmlu"
+        "dataset_name": null
       },
       "test": {
         "name": "test",
-        "num_bytes": 48785,
+        "num_bytes": 51521,
         "num_examples": 144,
-        "dataset_name": "mmlu"
+        "dataset_name": null
       },
       "validation": {
         "name": "validation",
-        "num_bytes": 4807,
+        "num_bytes": 5111,
        "num_examples": 16,
-        "dataset_name": "mmlu"
+        "dataset_name": null
       },
       "dev": {
         "name": "dev",
-        "num_bytes": 1520,
+        "num_bytes": 1615,
         "num_examples": 5,
-        "dataset_name": "mmlu"
-      }
-    },
-    "download_checksums": {
-      "data.tar": {
-        "num_bytes": 166184960,
-        "checksum": "bec563ba4bac1d6aaf04141cd7d1605d7a5ca833e38f994051e818489592989b"
+        "dataset_name": null
       }
     },
-    "download_size": 166184960,
-    "post_processing_size": null,
-    "dataset_size": 160656369,
-    "size_in_bytes": 326841329
+    "download_size": 47205152,
+    "dataset_size": 161058872,
+    "size_in_bytes": 208264024
   },
   "college_chemistry": {
     "description": "This is a massive multitask test consisting of multiple-choice questions from various branches of knowledge, covering 57 tasks including elementary mathematics, US history, computer science, law, and more.\n",