rdiehlmartinez committed on
Commit
15ab5c5
·
verified ·
1 Parent(s): f9acf49

Setting up data loading script

Browse files

First attempt at a data loading script that simply returns the evaluation JSON for each checkpoint. Data splits correspond to the different model sizes.

Files changed (1) hide show
  1. pythia-training-evals.py +74 -0
pythia-training-evals.py ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
"""
Dataset for storing training evaluations of pythia models, e.g. loss, perplexity
"""

import json

import datasets

_DESCRIPTION = """\
Dataset of training evaluation metrics (e.g. loss, perplexity) recorded at
each saved checkpoint of the Pythia model suite. Each builder config
corresponds to one model size.
"""


class PythiaTrainingEvals(datasets.GeneratorBasedBuilder):
    """Loads per-checkpoint evaluation JSON files for a given Pythia model size."""

    # Model sizes published for the Pythia suite that this dataset covers;
    # each size is exposed as its own builder config.
    MODEL_SIZES = [
        "70m",
        "160m",
        "410m",
        "1.4b",
        "2.8b",
    ]

    # Steps at which checkpoints (and thus evals.json files) exist:
    # log-spaced early steps up to 1000, then every 10k steps from 3000.
    # Hoisted to a class constant so _split_generators and _generate_examples
    # cannot drift apart (the original defined this only inside
    # _generate_examples, leaving _split_generators with an undefined name).
    CHECKPOINT_STEPS = [0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1000] + [
        3000 + (i * 10000) for i in range(0, 15)
    ]

    BUILDER_CONFIGS = [
        datasets.BuilderConfig(
            name=f"{model_size}",
            description=f"Dataset of pythia training evaluation metrics for pythia model size: {model_size}",
            version="1.0.0",
        )
        for model_size in MODEL_SIZES
    ]

    def _info(self) -> datasets.DatasetInfo:
        """Return dataset metadata (features are inferred from the JSON files)."""
        # The original referenced an undefined module global `_DESCRIPTION`;
        # it is now defined at module level above.
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
        )

    def _split_generators(self, dl_manager: datasets.DownloadManager):
        """
        Returns data for different splits - we define a split as a model size.
        """
        # Config names are currently plain model sizes; splitting on "__"
        # keeps room for future composite config names (e.g. "70m__seed1").
        model_size = self.config.name.split("__")[0]

        to_download_files = [
            f"./models/{model_size}/checkpoint_{checkpoint_step}/evals.json"
            for checkpoint_step in self.CHECKPOINT_STEPS
        ]

        downloaded_files = dl_manager.download_and_extract(to_download_files)

        return [
            datasets.SplitGenerator(
                name='default',
                gen_kwargs={
                    "filepaths": downloaded_files,
                }
            )
        ]

    def _generate_examples(self, filepaths):
        """
        Yields examples from each file in filepaths that are stored as jsons
        with the evaluation metrics for a given checkpoint step.

        Each yielded record is the file's JSON payload plus a
        "checkpoint_step" key; filepaths arrive in the same order as
        CHECKPOINT_STEPS, so index alignment gives each record its step.
        """
        # the filepaths should be a list of filepaths
        if isinstance(filepaths, str):
            filepaths = [filepaths]

        for idx, filepath in enumerate(filepaths):
            with open(filepath, 'rb') as f:
                data = json.load(f)

            record = {
                "checkpoint_step": self.CHECKPOINT_STEPS[idx],
                **data
            }
            yield idx, record