Update app.py
app.py
CHANGED
@@ -11,10 +11,10 @@ from accelerate import Accelerator
 accelerator = Accelerator(cpu=True)
 apol=[]
 pipe = accelerator.prepare(AutoPipelineForText2Image.from_pretrained("openskyml/overall-v1", torch_dtype=torch.float32, variant=None, use_safetensors=False, safety_checker=None))
-pipe.scheduler =
+pipe.scheduler = PNDMScheduler.from_config(pipe.scheduler.config)
 pipe.unet.to(memory_format=torch.channels_last)
-pipe
+pipe.to("cpu")
-def plex(prompt,neg_prompt,nut):
+def plex(prompt,neg_prompt,stips,nut):
     if nut == 0:
         nm = random.randint(1, 2147483616)
         while nm % 32 != 0:
@@ -22,11 +22,11 @@ def plex(prompt,neg_prompt,nut):
     else:
         nm=nut
     generator = torch.Generator(device="cpu").manual_seed(nm)
-    image = pipe(prompt=prompt, negative_prompt=neg_prompt, generator=generator, num_inference_steps=
+    image = pipe(prompt=[prompt]*2, negative_prompt=[neg_prompt]*2, generator=generator, num_inference_steps=stips)
     for a, imze in enumerate(image["images"]):
         apol.append(imze)
     return apol
 
-iface = gr.Interface(fn=plex,inputs=[gr.Textbox(label="Prompt"), gr.Textbox(label="negative_prompt", value="low quality, bad quality"),gr.Slider(label="manual seed (leave 0 for random)",minimum=0,step=32,maximum=2147483616,value=0)],outputs=gr.Gallery(label="Generated Output Image", columns=1),description="Running on cpu, very slow! by JoPmt.")
+iface = gr.Interface(fn=plex,inputs=[gr.Textbox(label="Prompt"), gr.Textbox(label="negative_prompt", value="low quality, bad quality"),gr.Slider(label="num steps",minimum=1,step=1,maximum=30,value=20),gr.Slider(label="manual seed (leave 0 for random)",minimum=0,step=32,maximum=2147483616,value=0)],outputs=gr.Gallery(label="Generated Output Image", columns=1),description="Running on cpu, very slow! by JoPmt.")
 iface.queue(max_size=1,api_open=False)
 iface.launch(max_threads=1)
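For context, the commit swaps the scheduler to PNDMScheduler, pins the pipeline to CPU, and wires a new "num steps" slider (stips) into num_inference_steps. Below is a minimal sketch of the same pipeline setup run outside Gradio and without accelerate: the model id, dtype, scheduler swap, and CPU settings are taken from the file above, while the prompt, seed, and step count are illustrative placeholders, not values from the commit.

# Sketch: reproduce the updated pipeline setup and one CPU generation.
# Prompt, seed, and step count below are illustrative only.
import torch
from diffusers import AutoPipelineForText2Image, PNDMScheduler

pipe = AutoPipelineForText2Image.from_pretrained(
    "openskyml/overall-v1",
    torch_dtype=torch.float32,
    variant=None,
    use_safetensors=False,
    safety_checker=None,
)
# Same scheduler swap as the new line 14 of app.py.
pipe.scheduler = PNDMScheduler.from_config(pipe.scheduler.config)
pipe.unet.to(memory_format=torch.channels_last)
pipe.to("cpu")

# Fixed seed for reproducibility; any value on the slider's step-of-32 grid works the same way.
generator = torch.Generator(device="cpu").manual_seed(1024)
out = pipe(
    prompt=["a lighthouse at dusk"] * 2,            # two images per call, as in the updated app.py
    negative_prompt=["low quality, bad quality"] * 2,
    generator=generator,
    num_inference_steps=20,
)
images = out.images  # equivalent to image["images"] in app.py

On the seed handling: the manual-seed slider steps by 32, and the while nm % 32 != 0 loop re-draws random seeds until they are divisible by 32, which appears intended to keep randomly chosen seeds on values the slider can later reproduce exactly.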