import os
import argparse

import numpy as np
import torch
import torch.nn.functional as F
from PIL import Image
from skimage import color, io

from models import ColorEncoder, ColorUNet

os.environ["CUDA_VISIBLE_DEVICES"] = '0'


def mkdirs(path):
    if not os.path.exists(path):
        os.makedirs(path)
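

# Note on the Lab conventions assumed by the helpers below: skimage's rgb2lab
# returns L in [0, 100] and ab channels within roughly +/-110 for sRGB inputs.
# The networks are assumed to consume L shifted by -50 (and scaled by 1/50 at
# inference time) and to predict ab scaled by 1/110, which is why the code adds
# 50 back, divides L by 50, and multiplies predicted ab by 110 before converting
# back to RGB.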


def Lab2RGB_out(img_lab):
    # Convert a (1, 3, H, W) Lab tensor (L channel still shifted by -50) back
    # into a uint8 RGB image.
    img_lab = img_lab.detach().cpu()
    img_l = img_lab[:, :1, :, :]
    img_ab = img_lab[:, 1:, :, :]
    img_l = img_l + 50
    pred_lab = torch.cat((img_l, img_ab), 1)[0, ...].numpy()
    out = (np.clip(color.lab2rgb(pred_lab.transpose(1, 2, 0)), 0, 1) * 255).astype("uint8")
    return out


def RGB2Lab(inputs):
    return color.rgb2lab(inputs)


def Normalize(inputs):
    # Center the L channel around zero; the ab channels are left unchanged.
    l = inputs[:, :, 0:1]
    ab = inputs[:, :, 1:3]
    l = l - 50
    lab = np.concatenate((l, ab), 2)
    return lab.astype('float32')


def numpy2tensor(inputs):
    out = torch.from_numpy(inputs.transpose(2, 0, 1))
    return out


def tensor2numpy(inputs):
    out = inputs[0, ...].detach().cpu().numpy().transpose(1, 2, 0)
    return out


def preprocessing(inputs):
    # Returns the raw RGB image and its normalized Lab version, both as
    # (1, 3, H, W) float tensors.
    img_lab = Normalize(RGB2Lab(inputs))
    img = np.array(inputs, 'float32')
    img = numpy2tensor(img)
    img_lab = numpy2tensor(img_lab)
    return img.unsqueeze(0), img_lab.unsqueeze(0)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Colorize manga images.")
    parser.add_argument("-i", "--input_folder", type=str, required=True, help="Path to the input folder containing manga images.")
    parser.add_argument("-r", "--reference_image", type=str, required=True, help="Path to the reference image for colorization.")
    parser.add_argument("-ckpt", "--model_checkpoint", type=str, required=True, help="Path to the model checkpoint file.")
    parser.add_argument("-o", "--output_folder", type=str, required=True, help="Path to the output folder where colorized images will be saved.")
    args = parser.parse_args()
| device = "cuda" | |
    ckpt = torch.load(args.model_checkpoint, map_location=lambda storage, loc: storage)

    colorEncoder = ColorEncoder().to(device)
    colorEncoder.load_state_dict(ckpt["colorEncoder"])
    colorEncoder.eval()

    colorUNet = ColorUNet().to(device)
    colorUNet.load_state_dict(ckpt["colorUNet"])
    colorUNet.eval()

    reference_img = Image.open(args.reference_image).convert("RGB")
    reference_img, reference_img_lab = preprocessing(reference_img)
    reference_img = reference_img.to(device)
    reference_img_lab = reference_img_lab.to(device)

    for root, dirs, files in os.walk(args.input_folder):
        for file in files:
            if file.lower().endswith(('.png', '.jpg', '.jpeg', '.gif', '.bmp')):
                input_image_path = os.path.join(root, file)
                img, img_lab = preprocessing(Image.open(input_image_path).convert("RGB"))
                img = img.to(device)
                img_lab = img_lab.to(device)

                with torch.no_grad():
                    # Encode the color distribution of the reference image; the UNet then
                    # colorizes the input page's L channel conditioned on that embedding.
                    ref_resize = F.interpolate(reference_img / 255., size=(256, 256), mode='bilinear', align_corners=False)
                    img_L_resize = F.interpolate(img_lab[:, :1, :, :] / 50., size=(256, 256), mode='bilinear', align_corners=False)
                    color_vector = colorEncoder(ref_resize)
                    fake_ab = colorUNet((img_L_resize, color_vector))
                    # Rescale the predicted ab channels to Lab range and back to full resolution.
                    fake_ab = F.interpolate(fake_ab * 110, size=(img.size(2), img.size(3)), mode='bilinear', align_corners=False)
                    fake_img = torch.cat((img_lab[:, :1, :, :], fake_ab), 1)
                    fake_img = Lab2RGB_out(fake_img)

                relative_path = os.path.relpath(input_image_path, args.input_folder)
                output_subfolder = os.path.join(args.output_folder, os.path.dirname(relative_path), 'color')
                mkdirs(output_subfolder)
                output_image_path = os.path.join(output_subfolder, f'{os.path.splitext(os.path.basename(input_image_path))[0]}_colorized.png')
                io.imsave(output_image_path, fake_img)

    print(f'Colored images have been saved to: {args.output_folder}')
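
# Example invocation (script name and paths are illustrative, not part of the project):
#   python inference.py -i ./manga_pages -r ./reference.png \
#       -ckpt ./checkpoints/model.pt -o ./results
# Each page found under the input folder is written to
# <output_folder>/<subdir>/color/<name>_colorized.png.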