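# app.py — Gradio demo: furniture removal via inpainting (make_inpainting from the
# local `models` module, mask extraction via `utils.get_mask`), plus placeholder
# Segmentation and Upscale buttons that currently return the input image unchanged.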
import gradio as gr
import numpy as np
from models import make_inpainting
import utils

def removeFurniture(input_img1,
                    input_img2,
                    positive_prompt,
                    negative_prompt,
                    num_of_images,
                    resolution):
    print("removeFurniture")

    # gr.Number inputs arrive as floats; the sizes below need integers.
    resolution = int(resolution)
    num_of_images = int(num_of_images)

    # Resize both the source photo and the drawn mask to the working resolution.
    input_img1 = input_img1.resize((resolution, resolution))
    input_img2 = input_img2.resize((resolution, resolution))

    # Convert the mask canvas into a binary inpainting mask.
    canvas_mask = np.array(input_img2)
    mask = utils.get_mask(canvas_mask)
    print(input_img1, mask, positive_prompt, negative_prompt)

    retList = make_inpainting(positive_prompt=positive_prompt,
                              image=input_img1,
                              mask_image=mask,
                              negative_prompt=negative_prompt,
                              num_of_images=num_of_images,
                              resolution=resolution)

    # Pad the results so they always fill the 10 output image slots.
    while len(retList) < 10:
        retList.append(None)
    return retList

def segmentation(image):
    # Placeholder: returns the input unchanged.
    return image

def upscale(image):
    # Placeholder: returns the input unchanged.
    return image

# UI: three columns, one button per tool.
with gr.Blocks() as app:
    with gr.Row():
        with gr.Column():
            gr.Button("FurnituRemove").click(
                removeFurniture,
                inputs=[gr.Image(label="img", type="pil"),
                        gr.Image(label="mask", type="pil"),
                        gr.Textbox(label="positive_prompt", value="empty room"),
                        gr.Textbox(label="negative_prompt", value=""),
                        gr.Number(label="num_of_images", value=2),
                        gr.Number(label="resolution", value=512)],
                # Ten image slots; removeFurniture pads unused ones with None.
                outputs=[gr.Image() for _ in range(10)])
        with gr.Column():
            gr.Button("Segmentation").click(segmentation,
                                            inputs=gr.Image(type="pil"),
                                            outputs=gr.Image())
        with gr.Column():
            gr.Button("Upscale").click(upscale,
                                       inputs=gr.Image(type="pil"),
                                       outputs=gr.Image())

app.launch(debug=True, share=True)