DmitrMakeev committed on
Commit 542e45a · 1 Parent(s): 97d1349

Upload 6 files

utils/__init__.py ADDED
File without changes
utils/cielab.py ADDED
@@ -0,0 +1,71 @@
+ from functools import partial
+ import numpy as np
+
+ class ABGamut:
+     RESOURCE_POINTS = "./utils/gamut_pts.npy"
+     RESOURCE_PRIOR = "./utils/gamut_probs.npy"
+     DTYPE = np.float32
+     EXPECTED_SIZE = 313
+     def __init__(self):
+         self.points = np.load(self.RESOURCE_POINTS).astype(self.DTYPE)
+         self.prior = np.load(self.RESOURCE_PRIOR).astype(self.DTYPE)
+         assert self.points.shape == (self.EXPECTED_SIZE, 2)
+         assert self.prior.shape == (self.EXPECTED_SIZE,)
+
+
+ class CIELAB:
+     L_MEAN = 50
+     AB_BINSIZE = 10
+     AB_RANGE = [-110 - AB_BINSIZE // 2, 110 + AB_BINSIZE // 2, AB_BINSIZE]
+     AB_DTYPE = np.float32
+     Q_DTYPE = np.int64
+
+     RGB_RESOLUTION = 101
+     RGB_RANGE = [0, 1, RGB_RESOLUTION]
+     RGB_DTYPE = np.float64
+
+     def __init__(self, gamut=None):
+         self.gamut = gamut if gamut is not None else ABGamut()
+         a, b, self.ab = self._get_ab()
+         self.ab_gamut_mask = self._get_ab_gamut_mask(
+             a, b, self.ab, self.gamut)
+
+         self.ab_to_q = self._get_ab_to_q(self.ab_gamut_mask)
+         self.q_to_ab = self._get_q_to_ab(self.ab, self.ab_gamut_mask)
+
+     @classmethod
+     def _get_ab(cls):
+         a = np.arange(*cls.AB_RANGE, dtype=cls.AB_DTYPE)
+         b = np.arange(*cls.AB_RANGE, dtype=cls.AB_DTYPE)
+         b_, a_ = np.meshgrid(a, b)
+         ab = np.dstack((a_, b_))
+         return a, b, ab
+
+     @classmethod
+     def _get_ab_gamut_mask(cls, a, b, ab, gamut):
+         ab_gamut_mask = np.full(ab.shape[:-1], False, dtype=bool)
+         a = np.digitize(gamut.points[:, 0], a) - 1
+         b = np.digitize(gamut.points[:, 1], b) - 1
+         for a_, b_ in zip(a, b):
+             ab_gamut_mask[a_, b_] = True
+
+         return ab_gamut_mask
+
+     @classmethod
+     def _get_ab_to_q(cls, ab_gamut_mask):
+         ab_to_q = np.full(ab_gamut_mask.shape, -1, dtype=cls.Q_DTYPE)
+         ab_to_q[ab_gamut_mask] = np.arange(np.count_nonzero(ab_gamut_mask))
+
+         return ab_to_q
+
+     @classmethod
+     def _get_q_to_ab(cls, ab, ab_gamut_mask):
+         return ab[ab_gamut_mask] + cls.AB_BINSIZE / 2
+
+     def bin_ab(self, ab):
+         ab_discrete = ((ab + 110) / self.AB_RANGE[2]).astype(int)
+
+         a, b = np.hsplit(ab_discrete.reshape(-1, 2), 2)
+
+         return self.ab_to_q[a, b].reshape(*ab.shape[:2])
+
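Usage note (not part of the uploaded file): a minimal sketch of driving the quantization helper above, assuming the two .npy gamut resources have been pulled from LFS and the script runs from the repository root so the hard-coded ./utils/ paths resolve; the input array here is synthetic.

import numpy as np
from utils.cielab import CIELAB

cielab = CIELAB()  # loads the 313-bin ab gamut from ./utils/*.npy
ab_plane = np.random.uniform(-110, 110, size=(64, 64, 2)).astype(np.float32)  # synthetic ab channels
q_labels = cielab.bin_ab(ab_plane)  # (64, 64) int64 map; in-gamut bins get a label in [0, 312], out-of-gamut bins get -1
centers = cielab.q_to_ab            # expected (313, 2) array of ab bin centers for decoding predictions
print(q_labels.shape, centers.shape)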
utils/dataset_lab.py ADDED
@@ -0,0 +1,37 @@
+ from __future__ import print_function, division
+ import torch, os, glob
+ from torch.utils.data import Dataset, DataLoader
+ import numpy as np
+ from PIL import Image
+ import cv2
+
+
+ class LabDataset(Dataset):
+
+     def __init__(self, rootdir=None, filelist=None, resize=None):
+
+         if filelist:
+             self.file_list = filelist
+         else:
+             assert os.path.exists(rootdir), "@dir:'%s' NOT exist ..." % rootdir
+             self.file_list = glob.glob(os.path.join(rootdir, '*.*'))
+             self.file_list.sort()
+         self.resize = resize
+
+     def __len__(self):
+         return len(self.file_list)
+
+     def __getitem__(self, idx):
+         bgr_img = cv2.imread(self.file_list[idx], cv2.IMREAD_COLOR)
+         if self.resize:
+             bgr_img = cv2.resize(bgr_img, (self.resize, self.resize), interpolation=cv2.INTER_CUBIC)
+         bgr_img = np.array(bgr_img / 255., np.float32)
+         lab_img = cv2.cvtColor(bgr_img, cv2.COLOR_BGR2LAB)
+         #print('--------L:', np.min(lab_img[:,:,0]), np.max(lab_img[:,:,0]))
+         #print('--------ab:', np.min(lab_img[:,:,1:3]), np.max(lab_img[:,:,1:3]))
+         lab_img = torch.from_numpy(lab_img.transpose((2, 0, 1)))
+         bgr_img = torch.from_numpy(bgr_img.transpose((2, 0, 1)))
+         gray_img = (lab_img[0:1,:,:] - 50.) / 50.
+         color_map = lab_img[1:3,:,:] / 110.
+         bgr_img = bgr_img*2. - 1.
+         return {'gray': gray_img, 'color': color_map, 'BGR': bgr_img}
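Usage note (not part of the uploaded file): a minimal sketch of feeding LabDataset into a PyTorch DataLoader; the data directory below is a placeholder.

from torch.utils.data import DataLoader
from utils.dataset_lab import LabDataset

dataset = LabDataset(rootdir='./data/train', resize=256)  # placeholder directory of images
loader = DataLoader(dataset, batch_size=8, shuffle=True, num_workers=2)
batch = next(iter(loader))
# 'gray' is L rescaled to roughly [-1, 1], 'color' is ab divided by 110, 'BGR' is in [-1, 1]
print(batch['gray'].shape, batch['color'].shape, batch['BGR'].shape)
# expected: [8, 1, 256, 256], [8, 2, 256, 256], [8, 3, 256, 256]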
utils/gamut_probs.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:19d00659c7d6f6ee47456fd2c19c86a073f7124875e3d5ab9d601864e062b56c
+ size 2584
utils/gamut_pts.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b5dec01315c34f43f1c8c089e84c45ae35d1838d8e77ed0e7ca930f79ffa450e
+ size 5088
utils/util.py ADDED
@@ -0,0 +1,178 @@
+ from __future__ import division
+ from __future__ import print_function
+ import os, glob, shutil, math, json
+ from queue import Queue
+ from threading import Thread
+ from skimage.segmentation import mark_boundaries
+ import numpy as np
+ import matplotlib.pyplot as plt
+ from PIL import Image
+ import cv2, torch
+
+ def get_gauss_kernel(size, sigma):
+     '''Function to mimic the 'fspecial' gaussian MATLAB function'''
+     x, y = np.mgrid[-size//2 + 1:size//2 + 1, -size//2 + 1:size//2 + 1]
+     g = np.exp(-((x**2 + y**2)/(2.0*sigma**2)))
+     return g/g.sum()
+
+
+ def batchGray2Colormap(gray_batch):
+     # map a batch of single-channel gray maps onto the 'viridis' colormap
+     colormap = plt.get_cmap('viridis')
+     heatmap_batch = []
+     for i in range(gray_batch.shape[0]):
+         gray_map = gray_batch[i, :, :, 0]
+         heatmap = (colormap(gray_map) * 2**16).astype(np.uint16)[:,:,:3]
+         heatmap_batch.append(heatmap/127.5-1.0)
+     return np.array(heatmap_batch)
+
+
+ class PlotterThread():
+     '''log tensorboard data in a background thread to save time'''
+     def __init__(self, writer):
+         self.writer = writer
+         self.task_queue = Queue(maxsize=0)
+         worker = Thread(target=self.do_work, args=(self.task_queue,))
+         worker.daemon = True
+         worker.start()
+
+     def do_work(self, q):
+         while True:
+             content = q.get()
+             if content[-1] == 'image':
+                 self.writer.add_image(*content[:-1])
+             elif content[-1] == 'scalar':
+                 self.writer.add_scalar(*content[:-1])
+             else:
+                 raise ValueError
+             q.task_done()
+
+     def add_data(self, name, value, step, data_type='scalar'):
+         self.task_queue.put([name, value, step, data_type])
+
+     def __len__(self):
+         return self.task_queue.qsize()
+
+
+ def save_images_from_batch(img_batch, save_dir, filename_list, batch_no=-1, suffix=None):
+     N,H,W,C = img_batch.shape
+     if C == 3:
+         #! rgb color image
+         for i in range(N):
+             # [-1,1] >>> [0,255]
+             image = Image.fromarray((127.5*(img_batch[i,:,:,:]+1.)).astype(np.uint8))
+             save_name = filename_list[i] if batch_no==-1 else '%05d.png' % (batch_no*N+i)
+             save_name = save_name.replace('.png', '-%s.png'%suffix) if suffix else save_name
+             image.save(os.path.join(save_dir, save_name), 'PNG')
+     elif C == 1:
+         #! single-channel gray image
+         for i in range(N):
+             # [-1,1] >>> [0,255]
+             image = Image.fromarray((127.5*(img_batch[i,:,:,0]+1.)).astype(np.uint8))
+             save_name = filename_list[i] if batch_no==-1 else '%05d.png' % (batch_no*N+i)
+             save_name = save_name.replace('.png', '-%s.png'%suffix) if suffix else save_name
+             image.save(os.path.join(save_dir, save_name), 'PNG')
+     else:
+         #! multi-channel: save each channel as a single image
+         for i in range(N):
+             # [-1,1] >>> [0,255]
+             for j in range(C):
+                 image = Image.fromarray((127.5*(img_batch[i,:,:,j]+1.)).astype(np.uint8))
+                 if batch_no == -1:
+                     _, file_name = os.path.split(filename_list[i])
+                     name_only, _ = os.path.splitext(file_name)
+                     save_name = name_only + '_c%d.png' % j
+                 else:
+                     save_name = '%05d_c%d.png' % (batch_no*N+i, j)
+                 save_name = save_name.replace('.png', '-%s.png'%suffix) if suffix else save_name
+                 image.save(os.path.join(save_dir, save_name), 'PNG')
+     return None
+
+
+ def save_normLabs_from_batch(img_batch, save_dir, filename_list, batch_no=-1, suffix=None):
+     N,H,W,C = img_batch.shape
+     if C != 3:
+         print('@Warning: the Lab images are NOT in 3 channels!')
+         return None
+     # denormalization: L: (L+1.0)*50.0 | a: a*110.0 | b: b*110.0
+     img_batch[:,:,:,0] = img_batch[:,:,:,0] * 50.0 + 50.0
+     img_batch[:,:,:,1:3] = img_batch[:,:,:,1:3] * 110.0
+     #! convert into RGB color image
+     for i in range(N):
+         rgb_img = cv2.cvtColor(img_batch[i,:,:,:], cv2.COLOR_LAB2RGB)
+         image = Image.fromarray((rgb_img*255.0).astype(np.uint8))
+         save_name = filename_list[i] if batch_no==-1 else '%05d.png' % (batch_no*N+i)
+         save_name = save_name.replace('.png', '-%s.png'%suffix) if suffix else save_name
+         image.save(os.path.join(save_dir, save_name), 'PNG')
+     return None
+
+
+ def save_markedSP_from_batch(img_batch, spix_batch, save_dir, filename_list, batch_no=-1, suffix=None):
+     N,H,W,C = img_batch.shape
+     #! img_batch: BGR nd-array (range:0~1)
+     #! map_batch: single-channel spixel map
+     #print('----------', img_batch.shape, spix_batch.shape)
+     for i in range(N):
+         norm_image = img_batch[i,:,:,:]*0.5+0.5
+         spixel_bd_image = mark_boundaries(norm_image, spix_batch[i,:,:,0].astype(int), color=(1,1,1))
+         #spixel_bd_image = cv2.cvtColor(spixel_bd_image, cv2.COLOR_BGR2RGB)
+         image = Image.fromarray((spixel_bd_image*255.0).astype(np.uint8))
+         save_name = filename_list[i] if batch_no==-1 else '%05d.png' % (batch_no*N+i)
+         save_name = save_name.replace('.png', '-%s.png'%suffix) if suffix else save_name
+         image.save(os.path.join(save_dir, save_name), 'PNG')
+     return None
+
+
+ def get_filelist(data_dir):
+     file_list = glob.glob(os.path.join(data_dir, '*.*'))
+     file_list.sort()
+     return file_list
+
+
+ def collect_filenames(data_dir):
+     file_list = get_filelist(data_dir)
+     name_list = []
+     for file_path in file_list:
+         _, file_name = os.path.split(file_path)
+         name_list.append(file_name)
+     name_list.sort()
+     return name_list
+
+
+ def exists_or_mkdir(path, need_remove=False):
+     if not os.path.exists(path):
+         os.makedirs(path)
+     elif need_remove:
+         shutil.rmtree(path)
+         os.makedirs(path)
+     return None
+
+
+ def save_list(save_path, data_list, append_mode=False):
+     n = len(data_list)
+     if append_mode:
+         # append mode only writes the newest (last) entry
+         with open(save_path, 'a') as f:
+             f.writelines([str(data_list[i]) + '\n' for i in range(n-1, n)])
+     else:
+         with open(save_path, 'w') as f:
+             f.writelines([str(data_list[i]) + '\n' for i in range(n)])
+     return None
+
+
+ def save_dict(save_path, dict_data):
+     with open(save_path, "w") as f:
+         json.dump(dict_data, f)
+     return None
+
+
+ if __name__ == '__main__':
+     # NOTE: the original snippet instantiated a `GamutIndex` helper that is not
+     # included in this upload; the CIELAB class from utils/cielab.py exposes the
+     # same quantization tables and is used here instead (assumption). Run from
+     # the repository root (e.g. `python -m utils.util`) so the package import
+     # and the hard-coded ./utils/*.npy paths resolve.
+     from utils.cielab import CIELAB
+     data_dir = '../PolyNet/PolyNet/cache/'
+     #visualizeLossCurves(data_dir)
+     clbar = CIELAB()
+     ab, ab_gamut_mask = clbar.ab, clbar.ab_gamut_mask
+     ab2q = clbar._get_ab_to_q(ab_gamut_mask)
+     q2ab = clbar._get_q_to_ab(ab, ab_gamut_mask)
+     maps = ab_gamut_mask*255.0
+     image = Image.fromarray(maps.astype(np.uint8))
+     image.save('gamut.png', 'PNG')
+     print(ab2q.shape)
+     print(q2ab.shape)
+     print('label range:', np.min(ab2q), np.max(ab2q))
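Usage note (not part of the uploaded file): a minimal sketch showing PlotterThread and save_images_from_batch in use; the log and output directories are placeholders and the image batch is synthetic.

import numpy as np
from torch.utils.tensorboard import SummaryWriter
from utils.util import PlotterThread, exists_or_mkdir, save_images_from_batch

writer = SummaryWriter('./logs')                 # placeholder log directory
plotter = PlotterThread(writer)                  # writes asynchronously from a daemon thread
plotter.add_data('train/loss', 0.123, step=0)    # queued, then flushed via writer.add_scalar

exists_or_mkdir('./results')
fake_batch = np.random.uniform(-1, 1, size=(4, 64, 64, 3)).astype(np.float32)  # NHWC in [-1, 1]
save_images_from_batch(fake_batch, './results', filename_list=None, batch_no=0)  # saves 00000.png ... 00003.png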