Create app.py
app.py  ADDED
@@ -0,0 +1,102 @@
import gradio as gr
import cv2
import numpy as np
from PIL import Image

def exposure_fusion(images):
    try:
        # Convert PIL images (RGB) to OpenCV format (BGR)
        images_cv = [cv2.cvtColor(np.array(img), cv2.COLOR_RGB2BGR) for img in images]

        # Align images using AlignMTB (median threshold bitmap alignment).
        # Per-image copies give the destination list its own buffers.
        align_mtb = cv2.createAlignMTB()
        aligned_images = [img.copy() for img in images_cv]
        align_mtb.process(images_cv, aligned_images)

        # Merge the aligned images using exposure fusion (Mertens)
        merge_mertens = cv2.createMergeMertens()
        fused = merge_mertens.process(aligned_images)

        # Convert the result from float32 in [0, 1] to uint8, then back to RGB
        fused = np.clip(fused * 255, 0, 255).astype("uint8")
        fused = cv2.cvtColor(fused, cv2.COLOR_BGR2RGB)
        return fused
    except Exception as e:
        return f"Error: {e}"

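For reference, the alignment and Mertens fusion steps above can be exercised on their own, outside Gradio. The sketch below is illustrative only; the file names are hypothetical and not part of this commit.

# Standalone sketch of the alignment + fusion steps (hypothetical file names).
import cv2
import numpy as np

paths = ["exposure_low.jpg", "exposure_mid.jpg", "exposure_high.jpg"]
imgs = [cv2.imread(p) for p in paths]              # BGR, uint8
aligned = [img.copy() for img in imgs]
cv2.createAlignMTB().process(imgs, aligned)        # compensate small camera shifts
fused = cv2.createMergeMertens().process(aligned)  # float32 result in [0, 1]
# imwrite expects BGR, which is what the fused result already is here
cv2.imwrite("fused.jpg", np.clip(fused * 255, 0, 255).astype("uint8"))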
def stabilize_crop_and_exposure_fusion(images):
    try:
        # Convert PIL images (RGB) to OpenCV format (BGR)
        images_cv = [cv2.cvtColor(np.array(img), cv2.COLOR_RGB2BGR) for img in images]

        # Align images using AlignMTB
        align_mtb = cv2.createAlignMTB()
        aligned_images = [img.copy() for img in images_cv]
        align_mtb.process(images_cv, aligned_images)

        # Determine the valid region of each image (to remove the black
        # borders introduced by the alignment shift)
        bounding_rects = []
        for img in aligned_images:
            gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
            # Pixels above a small intensity threshold are considered valid
            _, mask = cv2.threshold(gray, 10, 255, cv2.THRESH_BINARY)
            coords = cv2.findNonZero(mask)
            if coords is not None:
                bounding_rects.append(cv2.boundingRect(coords))
            else:
                bounding_rects.append((0, 0, img.shape[1], img.shape[0]))

        # Compute the intersection of all valid regions
        if not bounding_rects:
            return "No valid images provided."
        x_min, y_min, w, h = bounding_rects[0]
        x_max = x_min + w
        y_max = y_min + h
        for (x, y, w, h) in bounding_rects[1:]:
            x_min = max(x_min, x)
            y_min = max(y_min, y)
            x_max = min(x_max, x + w)
            y_max = min(y_max, y + h)
        if x_max <= x_min or y_max <= y_min:
            return "Images do not overlap enough for cropping."

        # Crop each aligned image to the common intersection region
        cropped_images = [img[y_min:y_max, x_min:x_max] for img in aligned_images]

        # Merge the cropped images using exposure fusion (Mertens)
        merge_mertens = cv2.createMergeMertens()
        fused = merge_mertens.process(cropped_images)
        fused = np.clip(fused * 255, 0, 255).astype("uint8")
        fused = cv2.cvtColor(fused, cv2.COLOR_BGR2RGB)
        return fused
    except Exception as e:
        return f"Error: {e}"

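To make the intersection step above concrete, here is a small worked example with two hypothetical bounding rectangles in (x, y, w, h) form; the numbers are made up for illustration.

# Hypothetical valid regions found for two aligned images
r1 = (5, 10, 100, 80)     # image 1: x=5, y=10, w=100, h=80
r2 = (0, 20, 110, 60)     # image 2: x=0, y=20, w=110, h=60

x_min = max(r1[0], r2[0])                    # 5   (rightmost left edge)
y_min = max(r1[1], r2[1])                    # 20  (lowest top edge)
x_max = min(r1[0] + r1[2], r2[0] + r2[2])    # min(105, 110) = 105
y_max = min(r1[1] + r1[3], r2[1] + r2[3])    # min(90, 80)   = 80
# Common crop: columns 5..105 and rows 20..80, i.e. a 100 x 60 region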
def process_images(files, advanced):
    if not files:
        return None
    # gr.File yields file paths (str) in newer Gradio, or tempfile-like objects
    # with a .name attribute in older versions; load either form as PIL RGB images
    images = [Image.open(f if isinstance(f, str) else f.name).convert("RGB")
              for f in files]
    # If the advanced option is selected, stabilize & crop before fusion
    if advanced:
        return stabilize_crop_and_exposure_fusion(images)
    return exposure_fusion(images)

# Gradio interface: upload multiple images and choose the processing method.
inputs = [
    gr.File(label="Upload Images", file_count="multiple"),
    gr.Checkbox(label="Advanced: Stabilize & Crop Before Fusion", value=False),
]

iface = gr.Interface(
    fn=process_images,
    inputs=inputs,
    outputs="image",
    title="Exposure Fusion with Stabilization",
    description=(
        "Upload multiple images with varying exposures. "
        "If 'Advanced: Stabilize & Crop Before Fusion' is selected, "
        "the app aligns the images, crops out extra borders, then fuses them."
    ),
)

iface.launch()
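A note on running this Space: the app needs Gradio, OpenCV, NumPy, and Pillow available at runtime, and running python app.py locally starts the interface on the default Gradio port. A minimal requirements.txt along the following lines should suffice; this is an assumption on my part and is not part of the commit.

# requirements.txt (assumed, not part of this commit)
gradio
opencv-python-headless
numpy
Pillow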