Update app.py
app.py CHANGED
@@ -19,7 +19,7 @@ MAX_IMAGE_SIZE = 1024
 
 
 def get_lora_sd_pipeline(
-    ckpt_dir='./
+    ckpt_dir='./output',
     base_model_name_or_path=model_id_default,
     dtype=torch_dtype,
     device=device
@@ -81,7 +81,7 @@ def infer(
     seed=42,
     guidance_scale=7.0,
     lora_scale=1.0,
-    num_inference_steps=
+    num_inference_steps=30,
     progress=gr.Progress(track_tqdm=True),
 ):
     generator = torch.Generator(device).manual_seed(seed)
@@ -99,7 +99,6 @@ def infer(
         height=height,
         generator=generator,
     ).images[0]
-    print(device, torch_dtype)
 
     return image
 
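For context, a minimal sketch of how the changed defaults might fit together in a diffusers/Gradio app of this shape. It is not the Space's actual code: model_id_default, torch_dtype, device, width and height are stand-ins for values defined elsewhere in app.py, and the LoRA loading here uses the generic pipe.load_lora_weights call rather than whatever helper the Space wraps. It only illustrates where ckpt_dir='./output' and num_inference_steps=30 plug in.

# Illustrative sketch only; stand-ins for definitions elsewhere in app.py.
import torch
import gradio as gr
from diffusers import StableDiffusionPipeline

model_id_default = "stable-diffusion-v1-5/stable-diffusion-v1-5"  # assumed base model
device = "cuda" if torch.cuda.is_available() else "cpu"
torch_dtype = torch.float16 if device == "cuda" else torch.float32


def get_lora_sd_pipeline(
    ckpt_dir='./output',                       # new default from this commit
    base_model_name_or_path=model_id_default,
    dtype=torch_dtype,
    device=device,
):
    pipe = StableDiffusionPipeline.from_pretrained(base_model_name_or_path, torch_dtype=dtype)
    pipe.load_lora_weights(ckpt_dir)           # assumed: LoRA adapter saved under ./output
    return pipe.to(device)


def infer(
    prompt,
    width=512,
    height=512,
    seed=42,
    guidance_scale=7.0,
    lora_scale=1.0,
    num_inference_steps=30,                    # new default from this commit
    progress=gr.Progress(track_tqdm=True),
):
    generator = torch.Generator(device).manual_seed(seed)
    pipe = get_lora_sd_pipeline()
    image = pipe(
        prompt,
        num_inference_steps=num_inference_steps,
        guidance_scale=guidance_scale,
        cross_attention_kwargs={"scale": lora_scale},  # one common way to apply lora_scale
        width=width,
        height=height,
        generator=generator,
    ).images[0]

    return image

The third hunk simply drops the leftover print(device, torch_dtype) debug statement after image generation; it has no effect on the returned image.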