Add flag
app.py CHANGED
@@ -1,4 +1,7 @@
+import csv
+import json
 import math
+import os
 import secrets
 from pathlib import Path
 from typing import cast
@@ -14,8 +17,6 @@ from PIL import Image, ImageFilter, ImageOps
 
 DEVICE = "cuda"
 
-EXAMPLES_DIR = Path(__file__).parent / "examples"
-
 MAX_SEED = np.iinfo(np.int32).max
 
 SYSTEM_PROMPT = r"""This two-panel split-frame image showcases a furniture in as a product shot versus styled in a room.
@@ -24,6 +25,39 @@ SYSTEM_PROMPT = r"""This two-panel split-frame image showcases a furniture in as
 
 MASK_CONTEXT_PADDING = 16 * 8
 
+if not os.environ.get("IN_SPACES", None):
+    FLAG_PATH = Path("/data/") / "flagged_data_points"
+else:
+    FLAG_PATH = Path(__file__).parent / "flagged_data_points"
+
+EXAMPLES: dict[str, list[str, str, str, list[str]]] = {}
+
+if not FLAG_PATH.exists():
+    FLAG_PATH.mkdir(parents=True)
+else:
+    flag_files = FLAG_PATH.glob("dataset*.csv")
+    for flag_file in flag_files:
+        with flag_file.open("r") as file:
+            reader = csv.reader(file)
+            next(reader)
+            for row in reader:
+                furniture_image, room_image, results_values, time = row
+                room_image = json.loads(room_image)
+                room_image_background = room_image["background"]
+                room_image_layers = room_image["layers"]
+                room_image_composite = room_image["composite"]
+                results_values = json.loads(results_values)
+                results_values = [result["image"] for result in results_values]
+                EXAMPLES[time] = [
+                    furniture_image,
+                    {
+                        "background": room_image_background,
+                        "layers": room_image_layers,
+                        "composite": room_image_composite,
+                    },
+                    # results_values,
+                ]
+
 if not torch.cuda.is_available():
 
     def _dummy_pipe(image: Image.Image, *args, **kwargs):  # noqa: ARG001
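For context on the loader above: the dataset*.csv glob and the two json.loads calls reflect how gr.CSVLogger stores flags, one dataset*.csv file per flagging setup with a header row (skipped via next(reader)), image components as file paths, and composite components such as the ImageEditor value and the Gallery as JSON strings. (Path("/data/") is presumably aimed at the persistent storage mount available on Spaces.) A sketch of the row shape the loop expects; every path below is invented for illustration:

    import json

    # Hypothetical flagged row: four columns matching the unpack in the loader above.
    row = [
        "flagged_data_points/furniture_image/0001.png",
        '{"background": "bg.png", "layers": ["mask.png"], "composite": "composite.png"}',
        '[{"image": "result_0.png"}, {"image": "result_1.png"}]',
        "2025-01-01 00:00:00.000000",
    ]

    furniture_image, room_image, results_values, time = row
    room_image = json.loads(room_image)  # dict with "background", "layers", "composite"
    results_values = [result["image"] for result in json.loads(results_values)]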
@@ -54,6 +88,9 @@ else:
     pipe.to(DEVICE)
 
 
+callback = gr.CSVLogger()
+
+
 def make_example(image_path: Path, mask_path: Path) -> EditorValue:
     background_image = Image.open(image_path)
     background_image = background_image.convert("RGB")
@@ -174,6 +211,18 @@ def adjust_bbox_to_divisible_16(
     return x_min, y_min, x_max, y_max
 
 
+def flag(
+    furniture_image_input: Image.Image,
+    room_image_input: EditorValue,
+    results: GalleryMediaType,
+):
+    if len(results) == 0:
+        return
+    callback.flag(
+        flag_data=[furniture_image_input, room_image_input, results],
+    )
+
+
 @spaces.GPU(duration=150)
 def infer(
     furniture_image_input: Image.Image,
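Gradio's flagging callback has a two-step lifecycle that this commit splits across the file: callback.setup(...) must run once (here, at the end of the Blocks layout) before the first callback.flag(...). A minimal self-contained sketch of that lifecycle, with placeholder components standing in for the app's real inputs:

    import gradio as gr

    # One-time logger; setup() must run before the first flag() call.
    callback = gr.CSVLogger()

    with gr.Blocks() as demo:
        image = gr.Image()        # placeholder components, not the app's real ones
        editor = gr.ImageEditor()
        flag_btn = gr.Button("Flag")

        callback.setup([image, editor], "flagged_data_points")
        flag_btn.click(
            lambda img, ed: callback.flag(flag_data=[img, ed]),
            inputs=[image, editor],
            preprocess=False,  # hand the raw payloads to flag(), as the commit does
        )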
@@ -376,18 +425,15 @@ with gr.Blocks(css=css) as demo:
                 max_height=50,
             )
             furniture_image_input = gr.Image(
-                label="
+                label="furniture",
                 type="pil",
                 sources=["upload"],
                 image_mode="RGB",
                 height=500,
             )
             furniture_examples = gr.Examples(
-                examples=[
-                    EXAMPLES_DIR / "1" / "furniture_image.png",
-                    EXAMPLES_DIR / "2" / "furniture_image.png",
-                ],
-                examples_per_page=12,
+                examples=list({example[0] for example in EXAMPLES.values()}),
+                examples_per_page=6,
                 inputs=[furniture_image_input],
             )
         with gr.Column(elem_id="col-mid"):
@@ -402,7 +448,7 @@ with gr.Blocks(css=css) as demo:
                 max_height=50,
             )
             room_image_input = gr.ImageEditor(
-                label="
+                label="room_image",
                 type="pil",
                 sources=["upload"],
                 image_mode="RGBA",
@@ -411,16 +457,18 @@ with gr.Blocks(css=css) as demo:
                 height=500,
             )
             room_examples = gr.Examples(
-                examples=[
-                    make_example(
-                        EXAMPLES_DIR / "1" / "room_image.png",
-                        EXAMPLES_DIR / "1" / "room_mask.png",
-                    ),
-                    make_example(
-                        EXAMPLES_DIR / "2" / "room_image.png",
-                        EXAMPLES_DIR / "2" / "room_mask.png",
-                    ),
-                ],
+                examples=[example[1] for example in EXAMPLES.values()],
+                examples_per_page=6,
+                # examples=[
+                #     make_example(
+                #         EXAMPLES_DIR / "1" / "room_image.png",
+                #         EXAMPLES_DIR / "1" / "room_mask.png",
+                #     ),
+                #     make_example(
+                #         EXAMPLES_DIR / "2" / "room_image.png",
+                #         EXAMPLES_DIR / "2" / "room_mask.png",
+                #     ),
+                # ],
                 inputs=[room_image_input],
             )
         with gr.Column(elem_id="col-right"):
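The example[1] entries fed to room_examples are plain dicts in the EditorValue shape that gr.ImageEditor consumes (and that the make_example helper builds); the keys match what the EXAMPLES loader stores. A minimal sketch with hypothetical paths:

    # EditorValue-style dict for one room example; keys mirror the EXAMPLES
    # loader above, paths are hypothetical.
    room_example = {
        "background": "room_image.png",  # base photo
        "layers": ["room_mask.png"],     # mask layer(s) drawn over it
        "composite": "room_image.png",   # flattened background + layers
    }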
@@ -435,14 +483,15 @@ with gr.Blocks(css=css) as demo:
                 max_height=50,
             )
             results = gr.Gallery(
-                label="
+                label="results",
                 show_label=False,
                 columns=2,
                 height=500,
-                interactive=False,
+                format="png",
+                # interactive=False,
             )
             run_button = gr.Button("Run")
-
+            flag_button = gr.Button("Flag")
             # Reset the results when the run button is clicked
             run_button.click(
                 outputs=results,
@@ -498,6 +547,16 @@ with gr.Blocks(css=css) as demo:
                 value=20,
             )
 
+    # This needs to be called at some point prior to the first call to callback.flag()
+    callback.setup(
+        [
+            furniture_image_input,
+            room_image_input,
+            # results,
+        ],
+        "flagged_data_points",
+    )
+
     with gr.Column(elem_id="col-showcase"):
         gr.HTML("""
             <div style="display: flex; justify-content: center; align-items: center; text-align: center; font-size: 20px;">
@@ -509,28 +568,13 @@ with gr.Blocks(css=css) as demo:
             </div>
         """)
         show_case = gr.Examples(
-            examples=[
-                [
-                    EXAMPLES_DIR / "1" / "furniture_image.png",
-                    make_example(
-                        EXAMPLES_DIR / "1" / "room_image.png",
-                        EXAMPLES_DIR / "1" / "room_mask.png",
-                    ),
-                ],
-                [
-                    EXAMPLES_DIR / "2" / "furniture_image.png",
-                    make_example(
-                        EXAMPLES_DIR / "2" / "room_image.png",
-                        EXAMPLES_DIR / "2" / "room_mask.png",
-                    ),
-                ],
-            ],
-            inputs=[furniture_image_input, room_image_input],
+            examples=list(EXAMPLES.values()),
+            inputs=[furniture_image_input, room_image_input, results],
             label=None,
+            examples_per_page=12,
         )
 
-    gr.on(
-        triggers=[run_button.click],
+    run_button.click(
         fn=infer,
         inputs=[
             furniture_image_input,
@@ -545,5 +589,10 @@ with gr.Blocks(css=css) as demo:
         ],
         outputs=[results, seed],
     )
+    flag_button.click(
+        fn=flag,
+        inputs=[furniture_image_input, room_image_input, results],
+        preprocess=False,
+    )
 
 demo.launch()
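Two details of the wiring are worth noting: flag() is a no-op when the gallery is empty, and flag_button.click passes preprocess=False, which makes Gradio deliver the raw component payloads (file references and JSON-serializable dicts) to flag() rather than decoded PIL images. That appears to be what keeps the flagged CSV rows in the path-and-JSON form that the EXAMPLES loader at the top of the file parses back with json.loads.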