Spaces:
Running
Running
File size: 5,222 Bytes
4f5dc12 a03bee1 4f5dc12 828ab31 4f8b2be 828ab31 4f5dc12 828ab31 4f8b2be 828ab31 4f5dc12 828ab31 4f5dc12 828ab31 4f5dc12 828ab31 4f5dc12 828ab31 4f5dc12 828ab31 4f5dc12 828ab31 4f5dc12 828ab31 4f5dc12 828ab31 4f5dc12 828ab31 4f5dc12 828ab31 4f5dc12 828ab31 4f5dc12 828ab31 4f5dc12 828ab31 4f5dc12 828ab31 4f5dc12 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 |
import torch
from diffusers import StableDiffusionInstructPix2PixPipeline
import gradio as gr
from PIL import Image
import random
# Load the InstructPix2Pix model.
model_id = "timbrooks/instruct-pix2pix"

# BUG FIX: the original code loaded the weights in float16 and then moved the
# pipeline to the CPU. Half-precision kernels (LayerNorm, convolutions, ...)
# are not implemented for CPU in PyTorch, so every inference call would crash.
# Use float16 only when a CUDA device is actually available; otherwise fall
# back to full precision on the CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"
dtype = torch.float16 if device == "cuda" else torch.float32
pipe = StableDiffusionInstructPix2PixPipeline.from_pretrained(model_id, torch_dtype=dtype)
pipe = pipe.to(device)

# Random seed shared by the editing functions below (see change_style()).
seed = random.randint(0, 10000)
def change_style():
    """Pick a fresh random seed for subsequent edits and report it.

    Mutates the module-level ``seed`` used by the editing functions so the
    next generation produces a different "style" for the same prompt.
    """
    global seed
    new_seed = random.randint(0, 10000)
    seed = new_seed
    return f"Seed changed to: {new_seed}"
# Repaint the walls of an uploaded room image in the requested color.
def change_color(image, color):
    """Run InstructPix2Pix with a wall-painting prompt built from *color*.

    ``guidance_scale`` (text CFG) controls how strongly the model follows the
    prompt; ``image_guidance_scale`` (image CFG) controls how much of the
    original image content is preserved.
    """
    # Prompt handed to the pipeline (the trailing space is intentional —
    # it matches the original runtime string exactly).
    wall_prompt = f"paint the walls with {color} color "
    result = pipe(
        prompt=wall_prompt,
        image=image,
        num_inference_steps=70,       # number of diffusion steps
        guidance_scale=7.5,           # text CFG
        image_guidance_scale=1.5,     # image CFG
        generator=torch.manual_seed(seed),  # shared module-level seed
    )
    return result.images[0]
# Free-form image editing driven by a natural-language instruction.
def edit_image(image, instruction):
    """Apply an InstructPix2Pix edit described by *instruction* to *image*.

    Uses a stronger text CFG (12.0) than change_color() so the model follows
    arbitrary instructions more aggressively, while the image CFG (1.5)
    keeps the original content recognizable.
    """
    text_cfg, image_cfg = 12.0, 1.5
    output = pipe(
        prompt=instruction,
        image=image,
        num_inference_steps=70,             # number of diffusion steps
        guidance_scale=text_cfg,            # text CFG
        image_guidance_scale=image_cfg,     # image CFG
        generator=torch.manual_seed(seed),  # shared module-level seed
    )
    return output.images[0]
# Gradio interface for the wall-repainting app.
def image_interface():
    """Build and return the Gradio Blocks UI for wall-color changes."""
    # Curated palette of common interior paint colors.
    palette = [
        "Alabaster",        # Off-white
        "Agreeable Gray",   # Warm gray
        "Sea Salt",         # Soft greenish-blue
        "Pure White",       # Bright white
        "Accessible Beige", # Warm beige
        "Mindful Gray",     # Cool gray
        "Peppercorn",       # Dark charcoal gray
        "Hale Navy",        # Dark navy blue
        "Tricorn Black",    # Pure black
        "Pale Oak",         # Soft taupe
        "Silver Strand",    # Soft blue-gray
        "Rainwashed",       # Light aqua
        "Orange Burst",     # Bright orange
        "Sunny Yellow",     # Bright yellow
        "Sage Green",       # Muted green
        "Firebrick Red",    # Deep red
        "Lavender",         # Soft purple
        "Sky Blue",         # Light blue
        "Coral",            # Vibrant coral
    ]
    with gr.Blocks() as demo_color:
        gr.Markdown("## Painting Color Changing App")
        # Inputs: the room photo and the desired wall color.
        room_image = gr.Image(type="pil", label="Upload Room Image")
        chosen_color = gr.Dropdown(palette, label="Choose Wall Color")
        # Output: the repainted image.
        painted_image = gr.Image(label="Edited Image")
        paint_button = gr.Button("Paint the walls")
        # Wire the button to the repaint function.
        paint_button.click(fn=change_color, inputs=[room_image, chosen_color], outputs=painted_image)
    return demo_color
# Gradio interface for free-form, instruction-driven editing.
def general_editing_interface():
    """Build and return the Gradio Blocks UI for general image editing."""
    with gr.Blocks() as demo_general:
        gr.Markdown("## General Image Editing App")
        # Inputs: the image and a natural-language edit instruction.
        source_image = gr.Image(type="pil", label="Upload an Image")
        instruction_box = gr.Textbox(label="Enter the Instruction", placeholder="Describe the changes (e.g., 'Make it snowy')")
        # Output: the edited image.
        edited_image = gr.Image(label="Edited Image")
        apply_button = gr.Button("Apply Edit")
        # Re-rolling the seed changes the "style" of subsequent edits.
        restyle_button = gr.Button("Change the Style")
        seed_message = gr.Textbox(label="Seed Info", interactive=False)
        # Wire the buttons to their handlers.
        apply_button.click(fn=edit_image, inputs=[source_image, instruction_box], outputs=edited_image)
        restyle_button.click(fn=change_style, outputs=seed_message)
    return demo_general
# Build each sub-application once, then mount both under tabs.
color_app = image_interface()
general_editing_app = general_editing_interface()

with gr.Blocks() as combined_demo:
    gr.Markdown("## Select the Application")
    # Tab order is part of the UI contract: general editing first.
    for tab_title, app in (
        ("General Image Editing App", general_editing_app),
        ("Changing The Paint App", color_app),
    ):
        with gr.Tab(tab_title):
            app.render()

# Launch the combined Gradio app.
combined_demo.launch()
|