"""
Script to generate WebDataset tar shards from images.
"""
import argparse
import json
import pickle
import tarfile
from io import BytesIO
from pathlib import Path

from torch.utils.data import DataLoader
from torchvision.transforms import ToPILImage
from tqdm import tqdm

from custom_datasets import OnlineFontSquare, TextSampler


def parse_arguments():
    """Parses command-line arguments."""
    parser = argparse.ArgumentParser(description='Generate WebDataset tar shards from images')
    parser.add_argument('--output_dir', type=str, default='/home/vpippi/font-square-v2/tars/fine_tune', help='Directory where the tar shards are written')
    parser.add_argument('--fonts', type=str, default='files/font_square/clean_fonts', help='Path to the fonts directory')
    parser.add_argument('--backgrounds', type=str, default='files/font_square/backgrounds', help='Path to the backgrounds directory')
    parser.add_argument('--renderers', type=str, help='Optional path to a pickled renderers file')
    parser.add_argument('--db_multiplier', type=int, default=1, help='Multiplier applied to the dataset length')
    parser.add_argument('--dataloader_num_workers', type=int, default=15, help='Number of DataLoader worker processes')
    parser.add_argument('--shard_size', type=int, default=4000, help='Number of samples per tar shard')
    return parser.parse_args()


def setup_dataset(args):
"""Initializes dataset and sampler."""
sampler = TextSampler(4, 128, (1, 32))
renderers = None
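    # If a renderers path was given, load the pre-built renderers from the pickle file.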
if args.renderers:
with open(args.renderers, 'rb') as f:
renderers = pickle.load(f)
dataset = OnlineFontSquare(args.fonts, args.backgrounds, sampler, renderers=renderers)
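    # Inflate the nominal dataset length so one pass yields db_multiplier times as many samples.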
dataset.length *= args.db_multiplier
return dataset


def add_bytes_to_tar(tar, filename, data_bytes):
    """Adds an in-memory byte string to the tar archive under the given filename."""
ti = tarfile.TarInfo(name=filename)
ti.size = len(data_bytes)
tar.addfile(ti, BytesIO(data_bytes))


def process_samples(loader, args):
"""Processes dataset samples and writes them into tar shards."""
output_dir = Path(args.output_dir)
output_dir.mkdir(parents=True, exist_ok=True)
to_pil = ToPILImage()
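    # Track the current shard index, the number of samples in the open shard, and the grand total.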
shard_idx, sample_in_shard, total_samples = 0, 0, 0
tar_path = output_dir / f'{shard_idx:06d}.tar'
tar = tarfile.open(tar_path, mode='w')
try:
for sample in tqdm(loader, desc="Processing samples"):
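            # batch_size is 1, so indexing [0] unwraps the single sample from the batch.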
text = sample['text'][0].strip()
rgb_img_tensor = sample['img'][0]
bw_img_tensor = sample['text_img'][0]
writer_id = sample['writer'][0].item()
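            # Keys restart at zero within each shard; WebDataset groups members that share a key into one sample.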
key = f'{sample_in_shard:06d}'
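            # Undo the [-1, 1] normalisation so ToPILImage receives values in [0, 1].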
rgb_pil = to_pil((rgb_img_tensor + 1) / 2)
bw_pil = to_pil((bw_img_tensor + 1) / 2)
rgb_bytes_io = BytesIO()
rgb_pil.save(rgb_bytes_io, format='PNG')
rgb_bytes = rgb_bytes_io.getvalue()
bw_bytes_io = BytesIO()
bw_pil.save(bw_bytes_io, format='PNG')
bw_bytes = bw_bytes_io.getvalue()
metadata = json.dumps({"text": text, "writer_id": writer_id}).encode('utf-8')
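            # Each sample contributes three tar members that share the same key.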
add_bytes_to_tar(tar, f'{key}.rgb.png', rgb_bytes)
add_bytes_to_tar(tar, f'{key}.bw.png', bw_bytes)
add_bytes_to_tar(tar, f'{key}.json', metadata)
sample_in_shard += 1
total_samples += 1
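            # Start a new shard once the current one holds shard_size samples.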
if sample_in_shard >= args.shard_size:
tar.close()
shard_idx += 1
sample_in_shard = 0
tar_path = output_dir / f'{shard_idx:06d}.tar'
tar = tarfile.open(tar_path, mode='w')
except KeyboardInterrupt:
print("Interrupted by user.")
    finally:
        tar.close()
    # The last tar may have been opened right before the loop ended; if it never
    # received a sample, drop it so the reported shard count matches reality.
    num_shards = shard_idx + (1 if sample_in_shard > 0 else 0)
    if sample_in_shard == 0:
        tar_path.unlink()
    print(f"Finished writing {total_samples} samples in {num_shards} tar shards into {output_dir}")


def main():
args = parse_arguments()
dataset = setup_dataset(args)
loader = DataLoader(dataset, batch_size=1, shuffle=False, num_workers=args.dataloader_num_workers)
process_samples(loader, args)


if __name__ == '__main__':
main()
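

# A minimal sketch of how the resulting shards could be read back with the
# `webdataset` package (assumed consumer; not used or required by this script,
# and the shard range below is only an example):
#
#     import webdataset as wds
#     ds = (wds.WebDataset("tars/fine_tune/{000000..000009}.tar")
#           .decode("pil")
#           .to_tuple("rgb.png", "bw.png", "json"))
#     for rgb_img, bw_img, meta in ds:
#         print(meta["text"], meta["writer_id"])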