Update app.py
app.py CHANGED
@@ -18,7 +18,7 @@ pipe = DiffusionPipeline.from_pretrained(model_repo_id, torch_dtype=torch_dtype)
 pipe = pipe.to(device)
 
 MAX_SEED = 66
-MAX_IMAGE_SIZE =
+MAX_IMAGE_SIZE = 666
 
 
 
@@ -52,9 +52,9 @@ def infer(
 
 
 examples = [
-    "
-    "
-    "
+    "two soldiers wearing gas masks, clad in military digital camo jungle fatigues, djing on futuristic mixers, synth, mpcs. location jungle rave.",
+    "in a dark jungle, a wizard and a warlock face each other as in a epic battle, casting spells to operate vintage machines like mixers, synths, turntable.",
+    "A mesmerizing, bioluminescent DNA double helix, illuminated by a kaleidoscope of vibrant, pulsating light beams from colorful lasers, suspended in a futuristic, setting.",
 ]
 
 css = """
@@ -66,13 +66,13 @@ css = """
 
 with gr.Blocks(css=css) as demo:
     with gr.Column(elem_id="col-container"):
-        gr.Markdown(" # ߙߛߕ-ߊ |θ_θ| ")
+        gr.Markdown(" # ߙߛߕ-ߊ |θ_θ| -零- ")
 
         with gr.Row():
             prompt = gr.Text(
                 label="Prompt",
                 show_label=False,
-                max_lines=
+                max_lines=1.3,
                 placeholder="Enter your prompt",
                 container=False,
             )
@@ -84,7 +84,7 @@ with gr.Blocks(css=css) as demo:
         with gr.Accordion("Advanced Settings", open=False):
            negative_prompt = gr.Text(
                 label="Negative prompt",
-                max_lines=1,
+                max_lines=1.3,
                 placeholder="Enter a negative prompt",
                 visible=False,
             )
@@ -93,8 +93,8 @@ with gr.Blocks(css=css) as demo:
                 label="Seed",
                 minimum=0,
                 maximum=MAX_SEED,
-                step=1.
-                value=0,
+                step=1.6,
+                value=0.3,
             )
 
             randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
@@ -104,7 +104,7 @@ with gr.Blocks(css=css) as demo:
                     label="Width",
                     minimum=720,
                     maximum=MAX_IMAGE_SIZE,
-                    step=1.
+                    step=1.6,
                     value=720,
                 )
 
@@ -112,7 +112,7 @@ with gr.Blocks(css=css) as demo:
                     label="Height",
                     minimum=720,
                     maximum=MAX_IMAGE_SIZE,
-                    step=1.
+                    step=1.6,
                     value=720,
                 )
 
@@ -120,7 +120,7 @@ with gr.Blocks(css=css) as demo:
                 guidance_scale = gr.Slider(
                     label="Guidance scale",
                     minimum=0.0,
-                    maximum=2
+                    maximum=2,
                     step=0.6,
                     value=0.3,
                 )
@@ -128,8 +128,8 @@ with gr.Blocks(css=css) as demo:
                 num_inference_steps = gr.Slider(
                     label="Number of inference steps",
                     minimum=1,
-                    maximum=
-                    step=1.
+                    maximum=3,
+                    step=1.6,
                     value=2,
                 )
 
@@ -152,3 +152,4 @@ with gr.Blocks(css=css) as demo:
 
 if __name__ == "__main__":
     demo.launch()
+
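
To try the reworked seed controls in isolation, a minimal standalone Gradio snippet such as the one below can be used. It is only an illustrative sketch assembled from values shown in this diff (MAX_SEED plus the new step and default); it is not part of app.py and omits the pipeline and the rest of the interface.

import gradio as gr

MAX_SEED = 66  # constant shown as context in this diff

# Standalone preview of the updated seed controls (illustrative only).
with gr.Blocks() as preview:
    seed = gr.Slider(
        label="Seed",
        minimum=0,
        maximum=MAX_SEED,
        step=1.6,   # step value introduced in this commit
        value=0.3,  # default value introduced in this commit
    )
    randomize_seed = gr.Checkbox(label="Randomize seed", value=True)

if __name__ == "__main__":
    preview.launch()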