import os
import gradio as gr
from utils import tensor_to_pil
from utils.image_generation import generate_image_condition, get_flux_pipe, get_sdxl_pipe
from utils.mesh_utils import Mesh
from utils.render_utils import render_views
from utils.texture_generation import generate_texture, get_seqtex_pipe
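# The get_*_pipe() helpers are assumed to construct and cache the heavy diffusion/texture
# pipelines on first call so later requests reuse the loaded models; Mesh and render_views
# handle geometry processing and rasterization of the four views.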
EXAMPLES = [
["examples/birdhouse.glb", True, False, False, False, 42, "First View", "SDXL", False, "A rustic birdhouse featuring a snow-covered roof, wood textures, and two decorative cardinal birds. It has a circular entryway and conveys a winter-themed aesthetic."],
["examples/shoe.glb", True, False, False, False, 42, "Second View", "SDXL", False, "Modern sneaker exhibiting a mesh upper and wavy rubber outsole. Features include lacing for adjustability and padded components for comfort. Normal maps emphasize geometric detail."],
# ["examples/mario.glb", False, False, False, True, 6666, "Third View", "FLUX", True, "Mario, a cartoon character wearing a red cap and blue overalls, with brown hair and a mustache, and white gloves, in a fighting pose. The clothes he wears are not in a reflection mode."],
]
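# Each example row follows the input order used by gr.Examples below:
# [mesh_upload, y2z, y2x, z2x, upside_down, img_condition_seed, selected_view,
#  model_choice, edge_refinement, text_prompt]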
LOAD_FIRST = True
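# When True, the SeqTex and SDXL pipelines are instantiated at startup (see the block just
# before demo.launch) so the first request does not pay the model-loading cost. For local
# runs this could be made configurable, e.g. (env var name is only an example):
#   LOAD_FIRST = os.environ.get("SEQTEX_LOAD_FIRST", "1") == "1"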
with gr.Blocks(delete_cache=(600, 600)) as demo:
gr.Markdown("# π¨ SeqTex: Generate Mesh Textures in Video Sequence")
gr.Markdown("""
## π Welcome to SeqTex!
**SeqTex** is a cutting-edge AI system that generates high-quality textures for 3D meshes using image prompts (here we use image generator to get them from textual prompts).
Choose to either **try our example models** below or **upload your own 3D mesh** to create stunning textures.
""")
gr.Markdown("---")
gr.Markdown("## π§ Step 1: Upload & Process 3D Mesh")
gr.Markdown("""
**π How to prepare your 3D mesh:**
- Upload your 3D mesh in **.obj** or **.glb** format
- **π‘ Pro Tip**:
- For optimal results, ensure your mesh includes only one part with <span style="color:#e74c3c; font-weight:bold;">UV parameterization</span>
- Otherwise, we'll combine all parts and generate UV parameterization using *xAtlas* (may take longer for high-poly meshes; may also fail for certain meshes)
- **β οΈ Important**: We recommend adjusting your model using *Mesh Orientation Adjustments* to be **Z-UP oriented** for best results
""")
position_map_tensor_path = gr.State()
normal_map_tensor_path = gr.State()
position_images_tensor_path = gr.State()
normal_images_tensor_path = gr.State()
mask_images_tensor_path = gr.State()
w2c_tensor_path = gr.State()
mesh = gr.State()
mvp_matrix_tensor_path = gr.State()
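# Judging by their names, these gr.State components hold file paths to tensors cached on
# disk by Step 1 (UV-space position/normal maps, per-view renders, masks, and camera
# matrices); Steps 2 and 3 read them back instead of recomputing the geometry.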
# fixed_texture_map = Image.open("image.webp").convert("RGB")
# Step 1
with gr.Row():
with gr.Column():
mesh_upload = gr.File(label="Upload 3D Mesh", file_types=[".obj", ".glb"])
# uv_tool = gr.Radio(["xAtlas", "UVAtlas"], label="UV parameterizer", value="xAtlas")
gr.Markdown("**Mesh Orientation Adjustments** (if needed):")
y2z = gr.Checkbox(label="Y → Z Transform", value=False, info="Rotate: Y becomes Z, -Z becomes Y")
y2x = gr.Checkbox(label="Y → X Transform", value=False, info="Rotate: Y becomes X, -X becomes Y")
z2x = gr.Checkbox(label="Z → X Transform", value=False, info="Rotate: Z becomes X, -X becomes Z")
upside_down = gr.Checkbox(label="Flip Vertically", value=False, info="Fix upside-down mesh orientation")
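# Orientation fixes as described in the checkbox info text: e.g. the "Y → Z" option maps
# (x, y, z) -> (x, -z, y), turning a Y-up asset into the Z-up frame recommended above.
# The exact convention is applied inside utils.mesh_utils.Mesh.process.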
step1_button = gr.Button("Process Mesh & Generate Views", variant="primary")
step1_progress = gr.Textbox(label="Processing Status", interactive=False)
with gr.Column():
model_input = gr.Model3D(label="Processed 3D Model", height=500)
with gr.Row(equal_height=True):
rgb_views = gr.Image(label="Generated Views", type="pil", scale=3)
position_map = gr.Image(label="Position Map", type="pil", scale=1)
normal_map = gr.Image(label="Normal Map", type="pil", scale=1)
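# Step 1 wiring: Mesh.process loads the mesh, applies the orientation fixes, ensures a UV
# parameterization, and caches the intermediate tensors; the chained .success handlers then
# convert those tensors to PIL previews (views, position map, normal map) and export the
# processed mesh for the 3D viewer.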
step1_button.click(
Mesh.process,
inputs=[mesh_upload, gr.State("xAtlas"), y2z, y2x, z2x, upside_down],
outputs=[position_map_tensor_path, normal_map_tensor_path, position_images_tensor_path, normal_images_tensor_path, mask_images_tensor_path, w2c_tensor_path, mesh, mvp_matrix_tensor_path, step1_progress]
).success(
tensor_to_pil,
inputs=[normal_images_tensor_path, mask_images_tensor_path],
outputs=[rgb_views]
).success(
tensor_to_pil,
inputs=[position_map_tensor_path],
outputs=[position_map]
).success(
tensor_to_pil,
inputs=[normal_map_tensor_path],
outputs=[normal_map]
).success(
Mesh.export,
inputs=[mesh, gr.State(None), gr.State(None)],
outputs=[model_input]
)
# Step 2
gr.Markdown("---")
gr.Markdown("## ποΈ Step 2: Select View & Generate Image Condition")
gr.Markdown("""
**π How to generate image condition:**
- Your mesh will be rendered from **four viewpoints** (front, back, left, right)
- Choose **one view** as your image condition
- Enter a **descriptive text prompt** for the desired texture
- Select your preferred AI model:
- <span style="color:#27ae60; font-weight:bold;">π― SDXL</span>: Fast generation with depth + normal control, better details (often suffer from wrong highlights)
- <span style="color:#3498db; font-weight:bold;">β‘ FLUX</span>: ~~High-quality generation with depth control (slower due to CPU offloading). Better work with **Edge Refinement**~~ (Not supported due to the memory limit of HF Space. You can try it locally)
""")
with gr.Row():
with gr.Column():
img_condition_seed = gr.Number(label="Random Seed", minimum=0, maximum=9999, step=1, value=42, info="Change for different results")
selected_view = gr.Radio(["First View", "Second View", "Third View", "Fourth View"], label="Camera View", value="First View", info="Choose which viewpoint to use as reference")
with gr.Row():
# model_choice = gr.Radio(["SDXL", "FLUX"], label="AI Model", value="SDXL", info="SDXL: Fast, depth+normal control | FLUX: High-quality, slower processing")
model_choice = gr.Radio(["SDXL"], label="AI Model", value="SDXL", info="SDXL: Fast, depth+normal control | FLUX: High-quality, slower (not supported due to the memory limit of HF Spaces)")
edge_refinement = gr.Checkbox(label="Edge Refinement", value=True, info="Smooth boundary artifacts (recommended; helps remove lighting highlights near view boundaries)")
text_prompt = gr.Textbox(label="Texture Description", placeholder="Describe the desired texture appearance (e.g., 'rustic wooden surface with weathered paint')", lines=2)
step2_button = gr.Button("Generate Image Condition", variant="primary")
step2_progress = gr.Textbox(label="Generation Status", interactive=False)
with gr.Column():
condition_image = gr.Image(label="Generated Image Condition", type="pil") # , interactive=False
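# Step 2 wiring: generate_image_condition builds conditioning from the cached per-view
# geometry for the selected view (presumably depth/normal-conditioned control networks)
# and runs the chosen image model with the text prompt to produce the reference image.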
step2_button.click(
generate_image_condition,
inputs=[position_images_tensor_path, normal_images_tensor_path, mask_images_tensor_path, w2c_tensor_path, text_prompt, selected_view, img_condition_seed, model_choice, edge_refinement],
outputs=[condition_image, step2_progress],
)
# Step 3
gr.Markdown("---")
gr.Markdown("## π¨ Step 3: Generate Final Texture")
gr.Markdown("""
**π How to generate final texture:**
- The **SeqTex pipeline** will create a complete texture map for your model
- View the results from multiple angles and download your textured 3D model (the viewport is a little bit dark)
""")
texture_map_tensor_path = gr.State()
with gr.Row():
with gr.Column(scale=1):
step3_button = gr.Button("Generate Final Texture", variant="primary")
step3_progress = gr.Textbox(label="Texture Generation Status", interactive=False)
texture_map = gr.Image(label="Generated Texture Map", interactive=False)
with gr.Column(scale=2):
rendered_imgs = gr.Image(label="Final Rendered Views")
mv_branch_imgs = gr.Image(label="SeqTex Direct Output")
with gr.Column(scale=1.5):
model_display = gr.Model3D(label="Final Textured Model", height=500)
# model_display = LitModel3D(label="Model with Texture",
# exposure=30.0,
# height=500)
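# Step 3 wiring: generate_texture runs the SeqTex pipeline on the cached geometry maps,
# the condition image, and the prompt to produce a UV texture map; render_views then
# re-renders the four viewpoints with that texture, and Mesh.export saves the textured
# model for the 3D viewer.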
step3_button.click(
generate_texture,
inputs=[position_map_tensor_path, normal_map_tensor_path, position_images_tensor_path, normal_images_tensor_path, condition_image, text_prompt, selected_view],
outputs=[texture_map_tensor_path, texture_map, mv_branch_imgs, step3_progress],
).success(
render_views,
inputs=[mesh, texture_map_tensor_path, mvp_matrix_tensor_path],
outputs=[rendered_imgs]
).success(
Mesh.export,
inputs=[mesh, gr.State(None), texture_map],
outputs=[model_display]
)
# Add example inputs for user convenience
gr.Markdown("---")
gr.Markdown("## π Try Our Examples")
gr.Markdown("**Quick Start**: Click on any example below to see SeqTex in action with pre-configured settings!")
gr.Examples(
examples=EXAMPLES,
inputs=[mesh_upload, y2z, y2x, z2x, upside_down, img_condition_seed, selected_view, model_choice, edge_refinement, text_prompt],
cache_examples=False
)
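# With no fn/outputs and cache_examples=False, clicking an example only pre-fills the
# Step 1/2 inputs; each step still has to be run manually.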
# Acknowledgments
gr.Markdown("---")
gr.Markdown("## π Acknowledgments")
gr.Markdown("""
**Special thanks to [Toshihiro Hayashi](mailto:[email protected])** for his valuable support and assistance in fixing bugs for this demo.
""")
if LOAD_FIRST:
import gc
get_seqtex_pipe()
print("SeqTex pipeline loaded successfully.")
get_sdxl_pipe()
print("SDXL pipeline loaded successfully.")
# get_flux_pipe()
# Note: FLUX pipeline is available in code but not loaded due to GPU memory constraints on HF Space
print("Note: FLUX and other models are available for local deployment.")
gc.collect()
assert os.environ.get("OPENCV_IO_ENABLE_OPENEXR") == "1", "OpenEXR support is required for this demo."
demo.launch(server_name="0.0.0.0")