Keiser41 committed
Commit dc7f2ae
1 Parent(s): 1e117bb

Update pintar.py

Files changed (1):
  1. pintar.py +16 -20

pintar.py CHANGED
@@ -36,23 +36,13 @@ def numpy2tensor(inputs):
     out = torch.from_numpy(inputs.transpose(2,0,1))
     return out
 
-def tensor2numpy(inputs):
-    out = inputs[0,...].detach().cpu().numpy().transpose(1,2,0)
-    return out
-
-def preprocessing(inputs):
-    img_lab = Normalize(RGB2Lab(inputs))
-    img = np.array(inputs, 'float32')
-    img = numpy2tensor(img)
-    img_lab = numpy2tensor(img_lab)
-    return img.unsqueeze(0), img_lab.unsqueeze(0)
-
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(description="Colorize manga images.")
     parser.add_argument("-i", "--input_folder", type=str, required=True, help="Path to the input folder containing manga images.")
     parser.add_argument("-r", "--reference_image", type=str, required=True, help="Path to the reference image for colorization.")
     parser.add_argument("-ckpt", "--model_checkpoint", type=str, required=True, help="Path to the model checkpoint file.")
     parser.add_argument("-o", "--output_folder", type=str, required=True, help="Path to the output folder where colorized images will be saved.")
+    parser.add_argument("-ne", "--no_extractor", action="store_true", help="Do not segment the manga panels.")
     args = parser.parse_args()
 
     device = "cuda"
@@ -68,29 +58,35 @@ if __name__ == "__main__":
     colorUNet.eval()
 
     reference_img = Image.open(args.reference_image).convert("RGB")
-    reference_img, reference_img_lab = preprocessing(reference_img)
-    reference_img = reference_img.to(device)
-    reference_img_lab = reference_img_lab.to(device)
+    reference_img = np.array(reference_img).astype(np.float32) / 255.0  # make sure the reference is in the range [0, 1]
+    reference_img_lab = RGB2Lab(reference_img)
+    reference_img_lab = Normalize(reference_img_lab)
+    reference_img_lab = numpy2tensor(reference_img_lab)
+    reference_img_lab = reference_img_lab.to(device).unsqueeze(0)
 
     for root, dirs, files in os.walk(args.input_folder):
         for file in files:
             if file.lower().endswith(('.png', '.jpg', '.jpeg', '.gif', '.bmp')):
                 input_image_path = os.path.join(root, file)
 
-                img, img_lab = preprocessing(Image.open(input_image_path).convert("RGB"))
-                img = img.to(device)
-                img_lab = img_lab.to(device)
+                img = Image.open(input_image_path).convert("RGB")
+                img = np.array(img).astype(np.float32) / 255.0  # make sure the input image is in the range [0, 1]
+                img_lab = RGB2Lab(img)
+                img_lab = Normalize(img_lab)
+                img_lab = numpy2tensor(img_lab)
+                img_lab = img_lab.to(device).unsqueeze(0)
 
                 with torch.no_grad():
-                    img_resize = F.interpolate(img / 255., size=(256, 256), mode='bilinear', recompute_scale_factor=False, align_corners=False)
-                    img_L_resize = F.interpolate(img_lab[:, :1, :, :] / 50., size=(256, 256), mode='bilinear', recompute_scale_factor=False, align_corners=False)
+                    img_resize = F.interpolate(img_lab / 110., size=(256, 256), mode='bilinear', recompute_scale_factor=False, align_corners=False)
+                    img_L_resize = F.interpolate(img_resize[:, :1, :, :] / 50., size=(256, 256), mode='bilinear', recompute_scale_factor=False, align_corners=False)
 
                     color_vector = colorEncoder(img_resize)
                     fake_ab = colorUNet((img_L_resize, color_vector))
-                    fake_ab = F.interpolate(fake_ab * 110, size=(img.size(2), img.size(3)), mode='bilinear', recompute_scale_factor=False, align_corners=False)
+                    fake_ab = F.interpolate(fake_ab, size=(img.shape[0], img.shape[1]), mode='bilinear', recompute_scale_factor=False, align_corners=False)
 
                     fake_img = torch.cat((img_lab[:, :1, :, :], fake_ab), 1)
                     fake_img = Lab2RGB_out(fake_img)
+                    fake_img = (fake_img * 255).astype(np.uint8)  # convert back to [0, 255]
 
                 relative_path = os.path.relpath(input_image_path, args.input_folder)
                 output_subfolder = os.path.join(args.output_folder, os.path.dirname(relative_path), 'color')
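
To follow the scaling constants in the new hunk (the / 110., / 50. and * 255 factors), the sketch below gives one plausible reading of the helpers pintar.py imports but this diff does not define: RGB2Lab, Normalize and numpy2tensor. Only numpy2tensor is visible in the context lines above; the RGB2Lab and Normalize bodies are assumptions based on standard CIELAB ranges, and the repository's real implementations may differ.

# Minimal sketch, NOT the repository's code: assumes RGB2Lab yields standard
# CIELAB values (L in [0, 100], a/b roughly in [-110, 110]).
import numpy as np
import torch
from skimage import color

def RGB2Lab(img):
    # img: float32 RGB in [0, 1], shape (H, W, 3) -> CIELAB array of the same shape
    return color.rgb2lab(img).astype(np.float32)

def Normalize(lab):
    # Assumed normalization: center L around zero so the / 50. and / 110.
    # rescalings in the main loop map channels roughly into [-1, 1].
    lab = lab.copy()
    lab[..., 0] -= 50.0
    return lab

def numpy2tensor(inputs):
    # HWC numpy array -> CHW float tensor; this matches the definition kept
    # unchanged at the top of the diff.
    return torch.from_numpy(inputs.transpose(2, 0, 1))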
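
The second hunk stops right after output_subfolder is computed, so the write-out step is not part of this commit view. Since the new code leaves fake_img as a uint8 H x W x 3 RGB array, a typical continuation could look like the hypothetical helper below (save_colorized, os.makedirs and Image.fromarray are illustrative choices, not taken from the commit).

# Hypothetical save step, not shown in the diff above: assumes fake_img is a
# uint8 (H, W, 3) RGB array, as produced by the new (fake_img * 255).astype(np.uint8) line.
import os
import numpy as np
from PIL import Image

def save_colorized(fake_img: np.ndarray, output_subfolder: str, filename: str) -> str:
    os.makedirs(output_subfolder, exist_ok=True)   # e.g. <output_folder>/<page dir>/color
    out_path = os.path.join(output_subfolder, filename)
    Image.fromarray(fake_img).save(out_path)
    return out_path

Assuming such a step, the updated script would be run along the lines of python pintar.py -i <input folder> -r <reference image> -ckpt <checkpoint> -o <output folder>, optionally with the new -ne/--no_extractor flag to skip panel segmentation.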