TKOSEI committed
Commit c21de19 · verified
1 Parent(s): 2d95390
Files changed (1):
  1. app.py +498 -0
app.py ADDED
@@ -0,0 +1,498 @@
+
+ import cv2
+ import torch
+ from PIL import Image, ImageOps
+ import numpy as np
+ import gradio as gr
+ import math
+ import os
+ import zipfile
+ import trimesh
+ import pygltflib
+ from scipy.ndimage import median_filter
+ import requests  # used to download the model weights
+
+
+ # Depth-Anything V2 model setup
+ from depth_anything_v2.dpt import DepthAnythingV2
+
+ DEVICE = 'cuda' if torch.cuda.is_available() else 'mps' if torch.backends.mps.is_available() else 'cpu'
+
+ model_configs = {
+     'vits': {'encoder': 'vits', 'features': 64, 'out_channels': [48, 96, 192, 384]},
+     'vitb': {'encoder': 'vitb', 'features': 128, 'out_channels': [96, 192, 384, 768]},
+     'vitl': {'encoder': 'vitl', 'features': 256, 'out_channels': [256, 512, 1024, 1024]},
+     'vitg': {'encoder': 'vitg', 'features': 384, 'out_channels': [1536, 1536, 1536, 1536]}
+ }
+
+ encoder = 'vitl'  # or 'vits', 'vitb', 'vitg'
+
+ model = DepthAnythingV2(**model_configs[encoder])
+
+ # Define model directory and path
+ MODEL_DIR = "models"
+ os.makedirs(MODEL_DIR, exist_ok=True)
+ model_filename = f'depth_anything_v2_{encoder}.pth'
+ model_path = os.path.join(MODEL_DIR, model_filename)
+
+ # Download the model weights if they are not present locally
+ if not os.path.exists(model_path):
+     print(f"Downloading {model_path}...")
+     url = f"https://huggingface.co/depth-anything/Depth-Anything-V2-Large/resolve/main/{model_filename}"
+     response = requests.get(url, stream=True)
+     with open(model_path, "wb") as f:
+         for chunk in response.iter_content(chunk_size=8192):
+             f.write(chunk)
+     print("Download complete.")
+
+ model.load_state_dict(torch.load(model_path, map_location='cpu'))
+ model = model.to(DEVICE).eval()
+
+ # Helper functions (from your notebook)
+ def quaternion_multiply(q1, q2):
+     x1, y1, z1, w1 = q1
+     x2, y2, z2, w2 = q2
+     return [
+         w1 * x2 + x1 * w2 + y1 * z2 - z1 * y2,
+         w1 * y2 - x1 * z2 + y1 * w2 + z1 * x2,
+         w1 * z2 + x1 * y2 - y1 * x2 + z1 * w2,
+         w1 * w2 - x1 * x2 - y1 * y2 - z1 * z2,
+     ]
+
+
+ def glb_add_lights(path_input, path_output):
+     """
+     Adds directional lights in the horizontal plane to the glb file.
+     :param path_input: path to input glb
+     :param path_output: path to output glb
+     :return: None
+     """
+     glb = pygltflib.GLTF2().load(path_input)
+
+     N = 3  # default max num lights in Babylon.js is 4
+     angle_step = 2 * math.pi / N
+     elevation_angle = math.radians(75)
+
+     light_colors = [
+         [1.0, 0.0, 0.0],
+         [0.0, 1.0, 0.0],
+         [0.0, 0.0, 1.0],
+     ]
+
+     lights_extension = {
+         "lights": [
+             {"type": "directional", "color": light_colors[i], "intensity": 2.0}
+             for i in range(N)
+         ]
+     }
+
+     if "KHR_lights_punctual" not in glb.extensionsUsed:
+         glb.extensionsUsed.append("KHR_lights_punctual")
+     glb.extensions["KHR_lights_punctual"] = lights_extension
+
+     light_nodes = []
+     for i in range(N):
+         angle = i * angle_step
+
+         pos_rot = [0.0, 0.0, math.sin(angle / 2), math.cos(angle / 2)]
+         elev_rot = [
+             math.sin(elevation_angle / 2),
+             0.0,
+             0.0,
+             math.cos(elevation_angle / 2),
+         ]
+         rotation = quaternion_multiply(pos_rot, elev_rot)
+
+         node = {
+             "rotation": rotation,
+             "extensions": {"KHR_lights_punctual": {"light": i}},
+         }
+         light_nodes.append(node)
+
+     light_node_indices = list(range(len(glb.nodes), len(glb.nodes) + N))
+     glb.nodes.extend(light_nodes)
+
+     root_node_index = glb.scenes[glb.scene].nodes[0]
+     root_node = glb.nodes[root_node_index]
+     if hasattr(root_node, "children"):
+         root_node.children.extend(light_node_indices)
+     else:
+         root_node.children = light_node_indices
+
+     glb.save(path_output)
+
+
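+ # Builds a 2.5D relief mesh from an RGB image and a depth map: the image plane is
+ # displaced by the filtered, normalized depth, surrounded by a flat outer frame,
+ # and exported via trimesh (in the current version only the .stl file is written).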
+ def extrude_depth_3d(
+     path_rgb,
+     path_depth,
+     path_out_base="../",
+     alpha=1.0,
+     invert=0,
+     output_model_scale=100,
+     filter_size=3,
+     coef_near=0.0,
+     coef_far=1.0,
+     emboss=0.3,
+     f_thic=0.05,
+     f_near=-0.15,
+     f_back=0.01,
+     vertex_colors=True,
+     scene_lights=True,
+     prepare_for_3d_printing=False,
+     zip_outputs=False,
+ ):
+     f_far_inner = -emboss
+     f_far_outer = f_far_inner - f_back
+
+     f_near = max(f_near, f_far_inner)
+
+     depth_image = Image.open(path_depth)
+     mono_image = Image.open(path_rgb).convert("L")
+
+     if invert == 1:
+         mono_image = ImageOps.invert(mono_image)
+
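+     # Normalize the depth to [0, 1], clip it to [coef_near, coef_far], renormalize,
+     # then blend in the median-filtered image luminance with weight alpha (the far
+     # plane is left untouched) before rescaling the result to the emboss height.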
+     w, h = depth_image.size
+     d_max = max(w, h)
+     depth_image = np.array(depth_image).astype(np.double)
+     mono_image = np.array(mono_image).astype(np.double)
+     z_min, z_max = np.min(depth_image), np.max(depth_image)
+     m_min, m_max = np.min(mono_image), np.max(mono_image)
+     depth_image = (depth_image.astype(np.double) - z_min) / (z_max - z_min)
+     depth_image[depth_image < coef_near] = coef_near
+     depth_image[depth_image > coef_far] = coef_far
+     z_min, z_max = np.min(depth_image), np.max(depth_image)
+     depth_image = (depth_image - z_min) / (z_max - z_min)
+     mono_image = median_filter(mono_image, size=5)
+     mono_image = (mono_image.astype(np.double) - m_min) / (m_max - m_min)
+     mono_image_new = np.where(depth_image == coef_far, 1, mono_image)
+     m_min = np.min(mono_image_new)
+     mono_image_new = np.where(depth_image == coef_far, 0, mono_image)
+     m_max = np.max(mono_image_new)
+     mono_image = np.where(depth_image == coef_far, m_min, mono_image)
+     mono_image = (mono_image - m_min) / (m_max - m_min)
+     depth_image = np.where(depth_image != 1.0, (1 - alpha) * depth_image + alpha * mono_image, depth_image)
+     # depth_image_new[depth_image < coef_near] = 0
+     # depth_image_new[depth_image > coef_far] = 1
+     # depth_image_new[depth_image_new < 0] = 0
+     depth_image = median_filter(depth_image, size=filter_size)
+     depth_image = emboss * (depth_image - np.min(depth_image)) / (np.max(depth_image) - np.min(depth_image))
+     Image.fromarray((depth_image * 255).astype(np.uint8)).convert("L").save(path_out_base + ".png")
+     rgb_image = np.array(
+         Image.open(path_rgb).convert("RGB").resize((w, h), Image.Resampling.LANCZOS)
+     )
+
+     w_norm = w / float(d_max - 1)
+     h_norm = h / float(d_max - 1)
+     w_half = w_norm / 2
+     h_half = h_norm / 2
+
+     x, y = np.meshgrid(np.arange(w), np.arange(h))
+     x = x / float(d_max - 1) - w_half  # [-w_half, w_half]
+     y = -y / float(d_max - 1) + h_half  # [-h_half, h_half]
+     z = -depth_image  # ranges from -emboss (far) to 0 (near)
+     vertices_2d = np.stack((x, y, z), axis=-1)
+     vertices = vertices_2d.reshape(-1, 3)
+     colors = rgb_image[:, :, :3].reshape(-1, 3) / 255.0
+
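+     # Tessellate the displaced image plane: two triangles per pixel quad.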
+     faces = []
+     for y in range(h - 1):
+         for x in range(w - 1):
+             idx = y * w + x
+             faces.append([idx, idx + w, idx + 1])
+             faces.append([idx + 1, idx + w, idx + 1 + w])
+
+     # OUTER frame
+
+     nv = len(vertices)
+     vertices = np.append(
+         vertices,
+         [
+             [-w_half - f_thic, -h_half - f_thic, f_near],  # 00
+             [-w_half - f_thic, -h_half - f_thic, f_far_outer],  # 01
+             [w_half + f_thic, -h_half - f_thic, f_near],  # 02
+             [w_half + f_thic, -h_half - f_thic, f_far_outer],  # 03
+             [w_half + f_thic, h_half + f_thic, f_near],  # 04
+             [w_half + f_thic, h_half + f_thic, f_far_outer],  # 05
+             [-w_half - f_thic, h_half + f_thic, f_near],  # 06
+             [-w_half - f_thic, h_half + f_thic, f_far_outer],  # 07
+         ],
+         axis=0,
+     )
+     faces.extend(
+         [
+             [nv + 0, nv + 1, nv + 2],
+             [nv + 2, nv + 1, nv + 3],
+             [nv + 2, nv + 3, nv + 4],
+             [nv + 4, nv + 3, nv + 5],
+             [nv + 4, nv + 5, nv + 6],
+             [nv + 6, nv + 5, nv + 7],
+             [nv + 6, nv + 7, nv + 0],
+             [nv + 0, nv + 7, nv + 1],
+         ]
+     )
+     colors = np.append(colors, [[0.5, 0.5, 0.5]] * 8, axis=0)
+
+     # INNER frame
+
+     nv = len(vertices)
+     vertices_left_data = vertices_2d[:, 0]  # H x 3
+     vertices_left_frame = vertices_2d[:, 0].copy()  # H x 3
+     vertices_left_frame[:, 2] = f_near
+     vertices = np.append(vertices, vertices_left_data, axis=0)
+     vertices = np.append(vertices, vertices_left_frame, axis=0)
+     colors = np.append(colors, [[0.5, 0.5, 0.5]] * (2 * h), axis=0)
+     for i in range(h - 1):
+         nvi_d = nv + i
+         nvi_f = nvi_d + h
+         faces.append([nvi_d, nvi_f, nvi_d + 1])
+         faces.append([nvi_d + 1, nvi_f, nvi_f + 1])
+
+     nv = len(vertices)
+     vertices_right_data = vertices_2d[:, -1]  # H x 3
+     vertices_right_frame = vertices_2d[:, -1].copy()  # H x 3
+     vertices_right_frame[:, 2] = f_near
+     vertices = np.append(vertices, vertices_right_data, axis=0)
+     vertices = np.append(vertices, vertices_right_frame, axis=0)
+     colors = np.append(colors, [[0.5, 0.5, 0.5]] * (2 * h), axis=0)
+     for i in range(h - 1):
+         nvi_d = nv + i
+         nvi_f = nvi_d + h
+         faces.append([nvi_d, nvi_d + 1, nvi_f])
+         faces.append([nvi_d + 1, nvi_f + 1, nvi_f])
+
+     nv = len(vertices)
+     vertices_top_data = vertices_2d[0, :]  # W x 3
+     vertices_top_frame = vertices_2d[0, :].copy()  # W x 3
+     vertices_top_frame[:, 2] = f_near
+     vertices = np.append(vertices, vertices_top_data, axis=0)
+     vertices = np.append(vertices, vertices_top_frame, axis=0)
+     colors = np.append(colors, [[0.5, 0.5, 0.5]] * (2 * w), axis=0)
+     for i in range(w - 1):
+         nvi_d = nv + i
+         nvi_f = nvi_d + w
+         faces.append([nvi_d, nvi_d + 1, nvi_f])
+         faces.append([nvi_d + 1, nvi_f + 1, nvi_f])
+
+     nv = len(vertices)
+     vertices_bottom_data = vertices_2d[-1, :]  # W x 3
+     vertices_bottom_frame = vertices_2d[-1, :].copy()  # W x 3
+     vertices_bottom_frame[:, 2] = f_near
+     vertices = np.append(vertices, vertices_bottom_data, axis=0)
+     vertices = np.append(vertices, vertices_bottom_frame, axis=0)
+     colors = np.append(colors, [[0.5, 0.5, 0.5]] * (2 * w), axis=0)
+     for i in range(w - 1):
+         faces.append([nv, nv + 2 + i + 1, nv + 2 + i])
+     faces.append([nv, nv + 1, nv + w + 1])
+
+     # FRONT frame
+
+     nv = len(vertices)
+     vertices = np.append(
+         vertices,
+         [
+             [-w_half - f_thic, -h_half - f_thic, f_near],
+             [-w_half - f_thic, h_half + f_thic, f_near],
+         ],
+         axis=0,
+     )
+     vertices = np.append(vertices, vertices_left_frame, axis=0)
+     colors = np.append(colors, [[0.5, 0.5, 0.5]] * (2 + h), axis=0)
+     for i in range(h - 1):
+         faces.append([nv, nv + 2 + i + 1, nv + 2 + i])
+     faces.append([nv, nv + 2, nv + 1])
+
+     nv = len(vertices)
+     vertices = np.append(
+         vertices,
+         [
+             [w_half + f_thic, h_half + f_thic, f_near],
+             [w_half + f_thic, -h_half - f_thic, f_near],
+         ],
+         axis=0,
+     )
+     vertices = np.append(vertices, vertices_right_frame, axis=0)
+     colors = np.append(colors, [[0.5, 0.5, 0.5]] * (2 + h), axis=0)
+     for i in range(h - 1):
+         faces.append([nv, nv + 2 + i, nv + 2 + i + 1])
+     faces.append([nv, nv + h + 1, nv + 1])
+
+     nv = len(vertices)
+     vertices = np.append(
+         vertices,
+         [
+             [w_half + f_thic, h_half + f_thic, f_near],
+             [-w_half - f_thic, h_half + f_thic, f_near],
+         ],
+         axis=0,
+     )
+     vertices = np.append(vertices, vertices_top_frame, axis=0)
+     colors = np.append(colors, [[0.5, 0.5, 0.5]] * (2 + w), axis=0)
+     for i in range(w - 1):
+         faces.append([nv, nv + 2 + i, nv + 2 + i + 1])
+     faces.append([nv, nv + 1, nv + 2])
+
+     nv = len(vertices)
+     vertices = np.append(
+         vertices,
+         [
+             [-w_half - f_thic, -h_half - f_thic, f_near],
+             [w_half + f_thic, -h_half - f_thic, f_near],
+         ],
+         axis=0,
+     )
+     vertices = np.append(vertices, vertices_bottom_frame, axis=0)
+     colors = np.append(colors, [[0.5, 0.5, 0.5]] * (2 + w), axis=0)
+     for i in range(w - 1):
+         faces.append([nv, nv + 2 + i + 1, nv + 2 + i])
+     faces.append([nv, nv + 1, nv + w + 1])
+
+     # BACK frame
+
+     nv = len(vertices)
+     vertices = np.append(
+         vertices,
+         [
+             [-w_half - f_thic, -h_half - f_thic, f_far_outer],  # 00
+             [w_half + f_thic, -h_half - f_thic, f_far_outer],  # 01
+             [w_half + f_thic, h_half + f_thic, f_far_outer],  # 02
+             [-w_half - f_thic, h_half + f_thic, f_far_outer],  # 03
+         ],
+         axis=0,
+     )
+     faces.extend(
+         [
+             [nv + 0, nv + 2, nv + 1],
+             [nv + 2, nv + 0, nv + 3],
+         ]
+     )
+     colors = np.append(colors, [[0.5, 0.5, 0.5]] * 4, axis=0)
+
+
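+     # Assemble the mesh with trimesh, merge duplicate vertices, and scale the
+     # longest extent of the model to output_model_scale.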
+     trimesh_kwargs = {}
+     if vertex_colors:
+         trimesh_kwargs["vertex_colors"] = colors
+     mesh = trimesh.Trimesh(vertices=vertices, faces=faces, **trimesh_kwargs)
+
+     mesh.merge_vertices()
+
+     current_max_dimension = max(mesh.extents)
+     scaling_factor = output_model_scale / current_max_dimension
+     mesh.apply_scale(scaling_factor)
+
+     if prepare_for_3d_printing:
+         rotation_mat = trimesh.transformations.rotation_matrix(
+             np.radians(0), [0.5, 0, 0]
+         )
+         mesh.apply_transform(rotation_mat)
+
+     if path_out_base is None:
+         path_out_base = os.path.splitext(path_depth)[0].replace("_16bit", "")
+     path_out_glb = path_out_base + ".glb"
+     path_out_stl = path_out_base + ".stl"
+     path_out_obj = path_out_base + ".obj"
+
+     mesh.export(path_out_stl, file_type="stl")
+     # NOTE: glb/obj export and the optional zipping below are currently disabled
+     # (kept inside a string literal), so only the .stl file is actually written.
+     """
+     mesh.export(path_out_glb, file_type="glb")
+     if scene_lights:
+         glb_add_lights(path_out_glb, path_out_glb)
+     mesh.export(path_out_obj, file_type="obj")
+
+     if zip_outputs:
+         with zipfile.ZipFile(path_out_glb + ".zip", "w", zipfile.ZIP_DEFLATED) as zipf:
+             arcname = os.path.basename(os.path.splitext(path_out_glb)[0]) + ".glb"
+             zipf.write(path_out_glb, arcname=arcname)
+         path_out_glb = path_out_glb + ".zip"
+         with zipfile.ZipFile(path_out_stl + ".zip", "w", zipfile.ZIP_DEFLATED) as zipf:
+             arcname = os.path.basename(os.path.splitext(path_out_stl)[0]) + ".stl"
+             zipf.write(path_out_stl, arcname=arcname)
+         path_out_stl = path_out_stl + ".zip"
+         with zipfile.ZipFile(path_out_obj + ".zip", "w", zipfile.ZIP_DEFLATED) as zipf:
+             arcname = os.path.basename(os.path.splitext(path_out_obj)[0]) + ".obj"
+             zipf.write(path_out_obj, arcname=arcname)
+         path_out_obj = path_out_obj + ".zip"
+     """
+     return path_out_glb, path_out_stl, path_out_obj
+
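+ # Returns a (width, height) pair that scales the shorter image side to `length`
+ # while preserving the aspect ratio.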
+ def scale_to_width(img, length):
+     if img.width < img.height:
+         width = length
+         height = round(img.height * length / img.width)
+     else:
+         width = round(img.width * length / img.height)
+         height = length
+     return (width, height)
+
+
+ # Gradio Interface function
+ def process_image_and_generate_stl(image_input, depth_near, depth_far, thickness, alpha):
+     # Depth Estimation
+     raw_img = cv2.imread(image_input)
+     depth = model.infer_image(raw_img)  # HxW raw depth map in numpy
+
+     # Save depth map temporarily
+     depth_output_path = "output_depth.png"
+     cv2.imwrite(depth_output_path, depth)
+
+     # Prepare images for 3D model generation
+     img_rgb = image_input
+     img_depth = depth_output_path
+     inv = 0  # Assuming no inversion for now, based on previous code
+     # Image.open(img_rgb).convert("L").save("example_1_black.png")  # This line might not be necessary for the final output
+     size = scale_to_width(Image.open(img_rgb), 512)
+     Image.open(img_rgb).resize(size, Image.Resampling.LANCZOS).save("one.png")  # Use Resampling.LANCZOS
+     if inv == 1:
+         Image.open(img_depth).convert(mode="F").resize(size, Image.Resampling.BILINEAR).convert("I").save("two.png")  # Use Resampling.BILINEAR
+     else:
+         img = Image.open(img_depth).convert(mode="F").resize(size, Image.Resampling.BILINEAR).convert("I")  # Use Resampling.BILINEAR
+         img = np.array(img).astype(np.double)
+         im_max = np.max(img)
+         im_min = np.min(img)
+         img = (1 - (img - im_min) / (im_max - im_min)) * im_max
+         img = Image.fromarray(img)
+         img.convert("I").save("two.png")
+
+
+     # 3D Model Generation
+     output_path_base = "generated_relief"
+     glb_path, stl_path, obj_path = extrude_depth_3d(
+         "one.png",
+         "two.png",
+         alpha=alpha,
+         invert=inv,
+         path_out_base=output_path_base,
+         output_model_scale=100,
+         filter_size=5,  # Using 5 based on previous code
+         coef_near=depth_near,
+         coef_far=depth_far,
+         emboss=thickness,
+         f_thic=0.0,  # Using 0.0 based on previous code
+         f_near=-thickness,  # Using -thickness based on previous code
+         f_back=0.01,  # Using 0.01 based on previous code
+         vertex_colors=True,
+         scene_lights=True,
+         prepare_for_3d_printing=True,
+     )
+
+     return stl_path  # Return the path to the generated STL file
+
+
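+ # Slider mapping: "Depth Near"/"Depth Far" clip the normalized depth (coef_near/coef_far),
+ # "Thickness" sets the emboss height, and "Alpha" blends image luminance into the depth.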
+ # Gradio Interface definition
+ iface = gr.Interface(
+     fn=process_image_and_generate_stl,
+     inputs=[
+         gr.Image(type="filepath", label="Upload Image"),
+         gr.Slider(minimum=0, maximum=1.0, value=0, label="Depth Near"),
+         gr.Slider(minimum=0, maximum=1.0, value=1.0, label="Depth Far"),
+         gr.Slider(minimum=0.1, maximum=1.0, value=0.3, label="Thickness"),
+         gr.Slider(minimum=0, maximum=1.0, value=0.05, label="Alpha"),
+     ],
+     outputs=gr.File(label="Download STL File"),  # Use gr.File() for file downloads
+     title="Image to 2.5D Relief Model Generator",
+     description="Upload an image, set parameters, and generate a 2.5D relief model (.stl file)."
+ )
+
+ # Launch the interface (for local testing)
+ if __name__ == "__main__":
+     iface.launch(debug=True)