Update app.py
Browse files
app.py
CHANGED
@@ -1,5 +1,5 @@
|
|
1 |
#!/usr/bin/env python
|
2 |
-
#Patch0.1x
|
3 |
import os
|
4 |
import random
|
5 |
import uuid
|
@@ -77,15 +77,14 @@ if torch.cuda.is_available():
|
|
77 |
"Corcelio/mobius",
|
78 |
torch_dtype=torch.float16,
|
79 |
use_safetensors=True,
|
80 |
-
add_watermarker=False
|
81 |
-
variant="fp16"
|
82 |
)
|
83 |
if ENABLE_CPU_OFFLOAD:
|
84 |
pipe.enable_model_cpu_offload()
|
85 |
else:
|
86 |
-
pipe.to(device)
|
87 |
print("Loaded on Device!")
|
88 |
-
|
89 |
if USE_TORCH_COMPILE:
|
90 |
pipe.unet = torch.compile(pipe.unet, mode="reduce-overhead", fullgraph=True)
|
91 |
print("Model Compiled!")
|
@@ -211,7 +210,7 @@ with gr.Blocks(css=css, theme="bethecloud/storj_theme") as demo:
|
|
211 |
label="Width",
|
212 |
minimum=512,
|
213 |
maximum=2048,
|
214 |
-
step
|
215 |
value=1024,
|
216 |
)
|
217 |
height = gr.Slider(
|
|
|
1 |
#!/usr/bin/env python
|
2 |
+
# Patch0.1x
|
3 |
import os
|
4 |
import random
|
5 |
import uuid
|
|
|
77 |
"Corcelio/mobius",
|
78 |
torch_dtype=torch.float16,
|
79 |
use_safetensors=True,
|
80 |
+
add_watermarker=False
|
|
|
81 |
)
|
82 |
if ENABLE_CPU_OFFLOAD:
|
83 |
pipe.enable_model_cpu_offload()
|
84 |
else:
|
85 |
+
pipe.to(device)
|
86 |
print("Loaded on Device!")
|
87 |
+
|
88 |
if USE_TORCH_COMPILE:
|
89 |
pipe.unet = torch.compile(pipe.unet, mode="reduce-overhead", fullgraph=True)
|
90 |
print("Model Compiled!")
|
|
|
210 |
label="Width",
|
211 |
minimum=512,
|
212 |
maximum=2048,
|
213 |
+
step=8,
|
214 |
value=1024,
|
215 |
)
|
216 |
height = gr.Slider(
|