Update app.py
app.py CHANGED
@@ -36,7 +36,8 @@ def stitch_rgbd_videos(
     # For stitching: read the original video in full resolution (without downscaling).
     full_frames, target_fps = read_video_frames(processed_video, max_len, target_fps, max_res=-1)
     depths, _ = read_video_frames(depth_vis_video, max_len, target_fps, max_res=-1)
-
+
+    print(f"Depth frame shape: {depths[0].shape}, dtype: {depths[0].dtype}, min: {depths[0].min()}, max: {depths[0].max()}")

     # For each frame, create a visual depth image from the inferenced depths.
     d_min, d_max = depths.min(), depths.max()
@@ -45,11 +46,14 @@ def stitch_rgbd_videos(
         rgb_full = full_frames[i]  # Full-resolution RGB frame.
         depth_frame = depths[i].astype(np.uint8)  # Reduce memory footprint

-
-
+        print(f"Depth range: min={d_min}, max={d_max}, diff={d_max-d_min}")
+
+        # Add a small buffer to ensure range is never zero
+        d_min_adj = max(0, d_min - 10)
+        d_max_adj = min(255, d_max + 10)

         # Normalize the depth frame to the range [0, 255].
-        depth_norm = ((depth_frame - d_min) / (d_max - d_min) * 255).astype(np.uint8)
+        depth_norm = ((depth_frame - d_min_adj) / (d_max_adj - d_min_adj) * 255).astype(np.uint8)

         depth_norm = np.clip(depth_norm, 0, 255)
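Below is a minimal, self-contained sketch of the normalization-with-buffer idea this commit introduces, assuming NumPy uint8 depth frames. The helper name normalize_depth_frame, the buffer parameter, the float32 cast, and the flat-frame demo are illustrative additions for this note and are not part of app.py:

import numpy as np

def normalize_depth_frame(depth_frame: np.ndarray, d_min: float, d_max: float, buffer: float = 10.0) -> np.ndarray:
    """Map a depth frame to uint8 [0, 255], padding the observed range so the divisor is never zero."""
    # Widen the observed range by a small buffer, clamped to valid uint8 bounds;
    # this keeps (d_max_adj - d_min_adj) strictly positive even for a flat frame.
    d_min_adj = max(0.0, float(d_min) - buffer)
    d_max_adj = min(255.0, float(d_max) + buffer)
    depth_norm = (depth_frame.astype(np.float32) - d_min_adj) / (d_max_adj - d_min_adj) * 255.0
    return np.clip(depth_norm, 0, 255).astype(np.uint8)

# Hypothetical usage: a constant (flat) frame, the degenerate case the buffer guards against.
flat = np.full((4, 4), 128, dtype=np.uint8)
print(normalize_depth_frame(flat, flat.min(), flat.max()))

Without the buffer, a flat frame (d_min == d_max) would make the divisor zero; padding the range keeps it strictly positive, and the float32 cast in this sketch additionally avoids uint8 wraparound for pixels below d_min_adj.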