Update app.py
app.py CHANGED
@@ -9,7 +9,7 @@ from diffusers.utils import load_image
 device = "cuda" if torch.cuda.is_available() else "cpu"
 dtype = torch.float16 if torch.cuda.is_available() else torch.float32
 pipe = AutoPipelineForText2Image.from_pretrained("stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=dtype).to("cuda")
-pipe.load_ip_adapter("h94/IP-Adapter", subfolder="sdxl_models", weight_name="ip-adapter_sdxl.bin")
+#pipe.load_ip_adapter("h94/IP-Adapter", subfolder="sdxl_models", weight_name="ip-adapter_sdxl.bin")
 
 def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
     if randomize_seed:
@@ -42,15 +42,15 @@ def create_image(image_pil,
         "down": {"block_2": [0.0, control_scale]},
         "up": {"block_0": [0.0, control_scale, 0.0]},
     }
-    pipe.set_ip_adapter_scale(scale)
+    #pipe.set_ip_adapter_scale(scale)
 
-    style_image = load_image(image_pil)
+    #style_image = load_image(image_pil)
     generator = torch.Generator().manual_seed(randomize_seed_fn(seed, True))
 
 
     image = pipe(
         prompt=prompt,
-        ip_adapter_image=style_image,
+        #ip_adapter_image=style_image,
         negative_prompt=n_prompt,
         guidance_scale=guidance_scale,
         num_inference_steps=num_inference_steps,
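
For reference, below is a minimal, self-contained sketch of the IP-Adapter path that this commit comments out, assuming a diffusers version with IP-Adapter support for AutoPipelineForText2Image. The prompt, negative prompt, style-image URL, and sampling settings are placeholder values for illustration, not values taken from the Space.

# Minimal sketch of the disabled IP-Adapter style-transfer path (assumptions noted above).
import torch
from diffusers import AutoPipelineForText2Image
from diffusers.utils import load_image

device = "cuda" if torch.cuda.is_available() else "cpu"
dtype = torch.float16 if torch.cuda.is_available() else torch.float32

pipe = AutoPipelineForText2Image.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=dtype
).to(device)

# Attach the SDXL IP-Adapter weights (the call disabled in this commit).
pipe.load_ip_adapter("h94/IP-Adapter", subfolder="sdxl_models", weight_name="ip-adapter_sdxl.bin")

# Per-block scales: only selected down/up attention blocks receive the image
# conditioning; all other blocks stay at 0.0.
control_scale = 1.0
pipe.set_ip_adapter_scale({
    "down": {"block_2": [0.0, control_scale]},
    "up": {"block_0": [0.0, control_scale, 0.0]},
})

# Placeholder style image URL; load_image also accepts a PIL image, as in the app.
style_image = load_image("https://example.com/style_reference.png")

image = pipe(
    prompt="a cat",                      # placeholder prompt
    ip_adapter_image=style_image,        # style conditioning via IP-Adapter
    negative_prompt="low quality",       # placeholder negative prompt
    guidance_scale=5.0,
    num_inference_steps=30,
    generator=torch.Generator().manual_seed(0),
).images[0]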