Vittorio Pippi committed
Commit 5e8408d · 1 Parent(s): 831cd58

Generate script

Files changed (1)
  1. generate_webdataset.py +110 -0
generate_webdataset.py ADDED
@@ -0,0 +1,110 @@
+ """
+ Script to generate WebDataset tar shards from images.
+ """
+
+ import argparse
+ import pickle
+ import json
+ import tarfile
+ from pathlib import Path
+ from io import BytesIO
+
+ import torch
+ from torch.utils.data import DataLoader
+ from torchvision.transforms import ToPILImage
+ from PIL import Image
+ from tqdm import tqdm
+
+ from custom_datasets import OnlineFontSquare, TextSampler
+
+ def parse_arguments():
+     """Parses command-line arguments."""
+     parser = argparse.ArgumentParser(description='Generate WebDataset tar shards from images')
+     parser.add_argument('--output_dir', type=str, default='/home/vpippi/font-square-v2/tars/fine_tune', help='Output directory')
+     parser.add_argument('--fonts', type=str, default='files/font_square/clean_fonts', help='Fonts path')
+     parser.add_argument('--backgrounds', type=str, default='files/font_square/backgrounds', help='Backgrounds path')
+     parser.add_argument('--renderers', type=str, help='Renderers path')
+     parser.add_argument('--db_multiplier', type=int, default=1, help='Dataset multiplier')
+     parser.add_argument('--dataloader_num_workers', type=int, default=15, help='Dataloader num workers')
+     parser.add_argument('--shard_size', type=int, default=4000, help='Samples per tar shard')
+     return parser.parse_args()
+
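+ # The optional --renderers pickle presumably holds pre-built renderer objects that
+ # OnlineFontSquare can reuse; --db_multiplier only stretches the reported dataset length.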
+ def setup_dataset(args):
+     """Initializes dataset and sampler."""
+     sampler = TextSampler(4, 128, (1, 32))
+
+     renderers = None
+     if args.renderers:
+         with open(args.renderers, 'rb') as f:
+             renderers = pickle.load(f)
+
+     dataset = OnlineFontSquare(args.fonts, args.backgrounds, sampler, renderers=renderers)
+     dataset.length *= args.db_multiplier
+     return dataset
+
+ def add_bytes_to_tar(tar, filename, data_bytes):
+     """Adds a file to the tar archive."""
+     ti = tarfile.TarInfo(name=filename)
+     ti.size = len(data_bytes)
+     tar.addfile(ti, BytesIO(data_bytes))
+
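+ # WebDataset groups tar members by the basename before the first dot, so <key>.rgb.png,
+ # <key>.bw.png and <key>.json written below are read back as one sample. Note that keys
+ # restart at 000000 in every shard, so __key__ values are only unique within a shard.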
+ def process_samples(loader, args):
+     """Processes dataset samples and writes them into tar shards."""
+     output_dir = Path(args.output_dir)
+     output_dir.mkdir(parents=True, exist_ok=True)
+     to_pil = ToPILImage()
+
+     shard_idx, sample_in_shard, total_samples = 0, 0, 0
+     tar_path = output_dir / f'{shard_idx:06d}.tar'
+     tar = tarfile.open(tar_path, mode='w')
+
+     try:
+         for sample in tqdm(loader, desc="Processing samples"):
+             text = sample['text'][0].strip()
+             rgb_img_tensor = sample['img'][0]
+             bw_img_tensor = sample['text_img'][0]
+             writer_id = sample['writer'][0].item()
+
+             key = f'{sample_in_shard:06d}'
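+             # Tensors appear to be normalised to [-1, 1]; shift back to [0, 1] for ToPILImage.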
+             rgb_pil = to_pil((rgb_img_tensor + 1) / 2)
+             bw_pil = to_pil((bw_img_tensor + 1) / 2)
+
+             rgb_bytes_io = BytesIO()
+             rgb_pil.save(rgb_bytes_io, format='PNG')
+             rgb_bytes = rgb_bytes_io.getvalue()
+
+             bw_bytes_io = BytesIO()
+             bw_pil.save(bw_bytes_io, format='PNG')
+             bw_bytes = bw_bytes_io.getvalue()
+
+             metadata = json.dumps({"text": text, "writer_id": writer_id}).encode('utf-8')
+
+             add_bytes_to_tar(tar, f'{key}.rgb.png', rgb_bytes)
+             add_bytes_to_tar(tar, f'{key}.bw.png', bw_bytes)
+             add_bytes_to_tar(tar, f'{key}.json', metadata)
+
+             sample_in_shard += 1
+             total_samples += 1
+
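+             # Roll over to a fresh tar once the current shard holds --shard_size samples.
+             # (If the total count is an exact multiple of shard_size, the final tar stays empty.)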
+             if sample_in_shard >= args.shard_size:
+                 tar.close()
+                 shard_idx += 1
+                 sample_in_shard = 0
+                 tar_path = output_dir / f'{shard_idx:06d}.tar'
+                 tar = tarfile.open(tar_path, mode='w')
+
+     except KeyboardInterrupt:
+         print("Interrupted by user.")
+     finally:
+         tar.close()
+
+     print(f"Finished writing {total_samples} samples in {shard_idx+1} tar shards into {output_dir}")
+
+ def main():
+     args = parse_arguments()
+     dataset = setup_dataset(args)
+     loader = DataLoader(dataset, batch_size=1, shuffle=False, num_workers=args.dataloader_num_workers)
+     process_samples(loader, args)
+
+ if __name__ == '__main__':
+     main()
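
The shards written by this script follow the standard WebDataset layout, so they should be readable with the webdataset package. A minimal reading-back sketch (the single-shard filename and the decode/to_tuple pipeline are illustrative assumptions, not part of this commit; the field names match what the script writes):

    import webdataset as wds

    # Each sample yields the RGB image, the black-and-white text image and the JSON metadata.
    ds = wds.WebDataset("000000.tar").decode("pil").to_tuple("rgb.png", "bw.png", "json")
    for rgb_img, bw_img, meta in ds:
        print(meta["text"], meta["writer_id"])
        break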