Spaces: Running on Zero
hatmanstack committed
Commit · c9cab76
1 Parent(s): 614d5f3
ZeroGPU to T4
app.py CHANGED
@@ -1,22 +1,21 @@
 import torch
 import random
-import spaces
+#import spaces ## For ZeroGPU
 import gradio as gr
-from PIL import Image
 from diffusers import AutoPipelineForText2Image
 from diffusers.utils import load_image

 device = "cuda" if torch.cuda.is_available() else "cpu"
 dtype = torch.float16 if torch.cuda.is_available() else torch.float32
-pipe = AutoPipelineForText2Image.from_pretrained("stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=dtype)
+pipe = AutoPipelineForText2Image.from_pretrained("stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=dtype).to("cuda") ## For ZeroGPU no .to("cuda")
 pipe.load_ip_adapter("h94/IP-Adapter", subfolder="sdxl_models", weight_name="ip-adapter_sdxl.bin")
 pipe.to(device)
 def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
     if randomize_seed:
         seed = random.randint(0, 2000)
     return seed
-
-
+print("piped")
+#@spaces.GPU() ## For ZeroGPU
 def create_image(image_pil,
                  prompt,
                  n_prompt,
@@ -43,9 +42,9 @@ def create_image(image_pil,
             "up": {"block_0": [0.0, control_scale, 0.0]},
         }
     pipe.set_ip_adapter_scale(scale)
-
+
     style_image = load_image(image_pil)
-    generator = torch.Generator().manual_seed(randomize_seed_fn(seed, True))
+    generator = torch.Generator(device="cpu").manual_seed(randomize_seed_fn(seed, True)) ## For ZeroGPU no device="cpu"


     image = pipe(
@@ -57,6 +56,7 @@ def create_image(image_pil,
         generator=generator,
     ).images[0]

+
     return image


@@ -146,4 +146,4 @@ with block:

     gr.Markdown(article)

-block.launch(show_error=True)
+block.launch(show_error=True)
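
For reference, the "## For ZeroGPU" comments in the diff describe how the same script is wired when it runs on ZeroGPU rather than a dedicated T4. Below is a minimal sketch of that variant, reconstructed only from the removed lines and those comments; the create_image signature and body are abbreviated, and the keyword arguments passed to pipe() other than generator are illustrative stand-ins rather than the exact file contents.

import torch
import spaces  # ZeroGPU helper; commented out in the T4 version above
from diffusers import AutoPipelineForText2Image
from diffusers.utils import load_image

device = "cuda" if torch.cuda.is_available() else "cpu"
dtype = torch.float16 if torch.cuda.is_available() else torch.float32

# No chained .to("cuda") at load time in the ZeroGPU variant.
pipe = AutoPipelineForText2Image.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=dtype
)
pipe.load_ip_adapter("h94/IP-Adapter", subfolder="sdxl_models",
                     weight_name="ip-adapter_sdxl.bin")
pipe.to(device)

@spaces.GPU()  # ZeroGPU attaches a GPU only for the duration of each call
def create_image(image_pil, prompt, n_prompt, control_scale, seed):
    # Abbreviated: the real function takes additional arguments beyond
    # those visible in the diff.
    pipe.set_ip_adapter_scale({"up": {"block_0": [0.0, control_scale, 0.0]}})
    style_image = load_image(image_pil)
    generator = torch.Generator().manual_seed(seed)  # no device="cpu" here
    return pipe(
        prompt=prompt,
        negative_prompt=n_prompt,
        ip_adapter_image=style_image,
        generator=generator,
    ).images[0]

The T4 version in the diff inverts each of these points: import spaces and @spaces.GPU() are commented out, the pipeline is moved to CUDA eagerly at load time, and the seed generator is pinned to the CPU.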