# HuggingFace Space: InstructPix2Pix image-editing demo (furniture adding + general edits).
import torch
from diffusers import StableDiffusionInstructPix2PixPipeline
import gradio as gr
from PIL import Image
import random

# Load the InstructPix2Pix model.
# BUG FIX: the original loaded the pipeline in float16 and then moved it to
# "cpu" — fp16 inference is not supported on CPU (it errors or yields garbage
# output). Use fp16 only when a CUDA device is available; otherwise fall back
# to full precision on CPU.
model_id = "timbrooks/instruct-pix2pix"
device = "cuda" if torch.cuda.is_available() else "cpu"
dtype = torch.float16 if device == "cuda" else torch.float32
pipe = StableDiffusionInstructPix2PixPipeline.from_pretrained(model_id, torch_dtype=dtype)
pipe = pipe.to(device)

# Initialize a random seed (re-rolled by change_style() below).
seed = random.randint(0, 10000)
# Function to reset the seed | |
def change_style():
    """Re-roll the global generation seed and report the new value.

    Returns:
        str: a human-readable message containing the new seed.
    """
    global seed
    fresh_seed = random.randint(0, 10000)
    seed = fresh_seed
    return f"Seed changed to: {seed}"
# Furniture adding function | |
def add_furniture(image, style, color, room_type):
    """Render furniture of the requested style and color into a room photo.

    Args:
        image: PIL image of the room to edit.
        style: furniture style name (e.g. "Modern").
        color: furniture color tone (e.g. "Blue").
        room_type: room description (e.g. "Living Room").

    Returns:
        The edited PIL image produced by InstructPix2Pix.
    """
    # Build the edit instruction from the dropdown selections.
    instruction = f"Add {style} style furniture with a {color} tone to this {room_type}."
    # Seed the generator so repeated clicks with the same seed reproduce results.
    generator = torch.manual_seed(seed)
    result = pipe(
        prompt=instruction,
        image=image,
        num_inference_steps=50,
        guidance_scale=7.5,
        generator=generator,
    )
    return result.images[0]
# General image editing function | |
def edit_image(image, instruction):
    """Apply a free-form text instruction to an image via InstructPix2Pix.

    Args:
        image: PIL image to edit.
        instruction: natural-language edit description.

    Returns:
        The edited PIL image.
    """
    # Seed the generator so results are reproducible for the current seed.
    generator = torch.manual_seed(seed)
    result = pipe(
        prompt=instruction,
        image=image,
        num_inference_steps=50,
        guidance_scale=7.5,
        generator=generator,
    )
    return result.images[0]
# Gradio interface for furniture adding | |
def furniture_interface():
    """Build the Gradio Blocks UI for the furniture-adding workflow.

    Component creation order is preserved because gr.Blocks renders
    components in the order they are instantiated.

    Returns:
        gr.Blocks: the assembled (unlaunched) furniture demo.
    """
    with gr.Blocks() as demo_furniture:
        gr.Markdown("## Furniture Adding App")

        # --- inputs ---
        room_photo = gr.Image(type="pil", label="Upload an Image")
        style_choice = gr.Dropdown(
            ["Modern", "Classic", "Minimalist", "Vintage"],
            label="Choose Furniture Style",
        )
        color_choice = gr.Dropdown(
            ["Blue", "Green", "Red", "White", "Black"],
            label="Choose Furniture Color",
        )
        room_choice = gr.Dropdown(
            ["Living Room", "Bedroom", "Office", "Dining Room"],
            label="Room Type",
        )

        # --- outputs and controls ---
        edited_output = gr.Image(label="Edited Image")
        apply_btn = gr.Button("Add Furniture")
        reseed_btn = gr.Button("Change the Style")
        seed_message = gr.Textbox(label="Seed Info", interactive=False)

        # --- wiring ---
        apply_btn.click(
            fn=add_furniture,
            inputs=[room_photo, style_choice, color_choice, room_choice],
            outputs=edited_output,
        )
        reseed_btn.click(fn=change_style, outputs=seed_message)
    return demo_furniture
# Gradio interface for general image editing | |
def general_editing_interface():
    """Build the Gradio Blocks UI for free-form instruction-based editing.

    Component creation order is preserved because gr.Blocks renders
    components in the order they are instantiated.

    Returns:
        gr.Blocks: the assembled (unlaunched) general-editing demo.
    """
    with gr.Blocks() as demo_general:
        gr.Markdown("## General Image Editing App")

        # --- inputs ---
        source_image = gr.Image(type="pil", label="Upload an Image")
        edit_prompt = gr.Textbox(
            label="Enter the Instruction",
            placeholder="Describe the changes (e.g., 'Add sunglasses to the person')",
        )

        # --- outputs and controls ---
        edited_output = gr.Image(label="Edited Image")
        apply_btn = gr.Button("Apply Edit")
        reseed_btn = gr.Button("Change the Style")
        seed_message = gr.Textbox(label="Seed Info", interactive=False)

        # --- wiring ---
        apply_btn.click(
            fn=edit_image,
            inputs=[source_image, edit_prompt],
            outputs=edited_output,
        )
        reseed_btn.click(fn=change_style, outputs=seed_message)
    return demo_general
# Launch both Gradio apps | |
# Assemble both sub-apps and host them in a single tabbed interface.
furniture_app = furniture_interface()
general_editing_app = general_editing_interface()

with gr.Blocks() as combined_demo:
    gr.Markdown("## Select the Application")
    with gr.Tab("Furniture Adding App"):
        furniture_app.render()
    with gr.Tab("General Image Editing App"):
        general_editing_app.render()

# Launch the combined Gradio app.
combined_demo.launch()