Update ForNet.py
ForNet.py CHANGED
@@ -1126,16 +1126,38 @@ class RecombineDataset(Dataset):
         bg_rat_indices = super()._getitem(0)["bg_rat_idx_file"]
         self.train = "train" in bg_rat_indices.split("/")[-1]
 
-
-
-
-
-
-
-
-
+        bg_rat_idx_file = bg_rat_indices
+        if self.background_combination == "same":
+            try:
+                with open(bg_rat_indices, "r") as f:
+                    bg_rat_indices = json.load(f)
+                for in_cls in bg_rat_indices:
+                    if in_cls not in self.cls_to_idx:
+                        self.cls_to_idx[in_cls] = []
+                    for idx, rat in bg_rat_indices[in_cls]:
+                        if rat < self.pruning_ratio:
+                            self.cls_to_idx[in_cls].append(idx)
+            except (TypeError, KeyError, OSError):
+                logger.warning(
+                    f"Could not load background ratio indices from {bg_rat_indices}. Will do pruning and background selection on the fly. This will take more time in the first few epochs"
+                )
+                self.cls_to_idx = {cls: list(range(len(self))) for cls in IMAGENET2012_CLASSES.keys()}
+
         if self.background_combination == "all":
-
+            try:
+                self.cls_to_idx["all"] = []
+                with open(bg_rat_indices, "r") as f:
+                    bg_rat_indices = json.load(f)
+                for in_cls in bg_rat_indices:
+                    for idx, rat in bg_rat_indices[in_cls]:
+                        if rat < self.pruning_ratio:
+                            self.cls_to_idx["all"].append(idx)
+            except (TypeError, KeyError, OSError) as e:
+                logger.warning(f"Error {e} while extracting bg_rat_indices")
+                logger.warning(
+                    f"Could not load background ratio indices from {bg_rat_idx_file}. Will do pruning and background selection on the fly. This will take more time in the first few epochs"
+                )
+                self.cls_to_idx["all"] = list(range(len(self)))
 
     @property
     def total_epochs(self):
@@ -1176,14 +1198,24 @@ class RecombineDataset(Dataset):
         if self.background_combination == "orig":
             bg_item = fg_item
         elif self.background_combination == "same":
-
-
-
+            while True:
+                rand_idx = np.random.randint(len(self.cls_to_idx[in_cls]))
+                rand_idx = self.cls_to_idx[in_cls][rand_idx]
+                bg_item = super()._getitem(rand_idx)
+                if bg_item["fg/bg_area"] < self.pruning_ratio and bg_item["label"] == fg_item["label"]:
+                    break
+                else:
+                    self.cls_to_idx[in_cls].remove(rand_idx)
         else:
             # all
-
-
-
+            while True:
+                rand_idx = np.random.randint(len(self.cls_to_idx["all"]))
+                rand_idx = self.cls_to_idx["all"][rand_idx]
+                bg_item = super()._getitem(rand_idx)
+                if bg_item["fg/bg_area"] < self.pruning_ratio:
+                    break
+                else:
+                    self.cls_to_idx["all"].remove(rand_idx)
 
         fg_img = fg_item["fg"].convert("RGBA")
         bg_img = bg_item["bg"].convert("RGB")
@@ -1499,6 +1531,7 @@ class ForNet(datasets.GeneratorBasedBuilder):
 
     def _generate_examples(self, patch_files, split, hf_indices, cls_to_idx_loc, fg_bg_ratios):
         logger.info(f"Generating examples from {len(patch_files)} patch files")
+        logger.info(f"will save cls_to_idx to '{cls_to_idx_loc}'")
         logger.info("Opening files")
         class_to_zipfile = {}
         for f in patch_files:
@@ -1590,7 +1623,7 @@ class ForNet(datasets.GeneratorBasedBuilder):
         for proc in zip_procs:
             proc.join()
 
-
+        tqdm.write("Finished all processes")
         while not ret_queue.empty():
             data = ret_queue.get()
             in_cls = data["path"].split("/")[0]
@@ -1599,7 +1632,7 @@ class ForNet(datasets.GeneratorBasedBuilder):
             cls_to_idx[in_cls].append(foraug_idx)
             yield foraug_idx, data
             foraug_idx += 1
-        tqdm.write("Done")
+        tqdm.write(f"Done generating {split} examples. Saving cls_to_idx file at '{cls_to_idx_loc}'.")
         with open(cls_to_idx_loc, "w") as f:
             json.dump(cls_to_idx, f)
 
@@ -1760,3 +1793,4 @@ def _zip_loader(
             "fg/bg_area": fg_bg_ratios[patch_name],
         }
     )
+
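The new initialisation branches in the first hunk parse bg_rat_idx_file as JSON and keep only the indices whose stored foreground/background area ratio falls below pruning_ratio. Below is a minimal sketch of that pruning step; the file layout is inferred from the parsing loop, and the class ids, indices, and threshold value are illustrative only, not taken from the repository.

import json

# Hypothetical bg_rat_idx_file contents, inferred from the parsing loop in the diff:
# a JSON object mapping a class id to [dataset_index, fg/bg_area] pairs.
raw = json.loads(
    '{"n01440764": [[0, 0.12], [57, 0.48]], "n01443537": [[3, 0.05]]}'
)

pruning_ratio = 0.3  # assumed threshold; the real value is a dataset parameter
cls_to_idx = {}
for in_cls in raw:
    cls_to_idx.setdefault(in_cls, [])
    for idx, rat in raw[in_cls]:
        if rat < pruning_ratio:  # keep only backgrounds whose foreground covered little area
            cls_to_idx[in_cls].append(idx)

print(cls_to_idx)  # {'n01440764': [0], 'n01443537': [3]}

The except branches in the diff fall back to list(range(len(self))), so a missing or malformed index file degrades to on-the-fly pruning instead of failing outright.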
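The _getitem change in the second hunk uses the same rejection loop in both modes: draw a random candidate from the per-class (or global) index pool, accept it if its fg/bg_area is below the pruning threshold (and, for "same", its label matches the foreground), otherwise remove it from the pool and redraw. A self-contained sketch of that loop follows, with a toy get_item standing in for super()._getitem; all records and values are illustrative.

import numpy as np

# Toy stand-in for super()._getitem: each record carries the fields the loop checks.
ITEMS = [
    {"label": "n01440764", "fg/bg_area": 0.10},
    {"label": "n01440764", "fg/bg_area": 0.55},
    {"label": "n01443537", "fg/bg_area": 0.20},
]

def get_item(idx):
    return ITEMS[idx]

def sample_background(pool, pruning_ratio, fg_label=None):
    # Rejection-sample from `pool`; prune drawn candidates that fail the checks.
    while True:
        rand_idx = pool[np.random.randint(len(pool))]
        bg_item = get_item(rand_idx)
        ratio_ok = bg_item["fg/bg_area"] < pruning_ratio
        label_ok = fg_label is None or bg_item["label"] == fg_label
        if ratio_ok and label_ok:
            return rand_idx, bg_item
        pool.remove(rand_idx)  # never draw this candidate again

pool = [0, 1, 2]
idx, bg = sample_background(pool, pruning_ratio=0.3, fg_label="n01440764")
print(idx, bg)  # always ends at index 0, the only record passing both checks

Pruning rejected indices in place mirrors self.cls_to_idx[...].remove(rand_idx) in the diff, which is why the fallback path only costs extra time in the first few epochs: the pools shrink toward the precomputed lists as unsuitable candidates are drawn and discarded.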