from diffusers import AutoPipelineForText2Image, PNDMScheduler
import torch
import gradio as gr
import random
from accelerate import Accelerator
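# Run everything on the CPU; accelerate only handles device placement for the pipeline.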
accelerator = Accelerator(cpu=True)
# Load the text-to-image pipeline in float32 for CPU-only inference.
pipe = accelerator.prepare(AutoPipelineForText2Image.from_pretrained("openskyml/overall-v1", torch_dtype=torch.float32, variant=None, use_safetensors=False, safety_checker=None))
pipe.scheduler = PNDMScheduler.from_config(pipe.scheduler.config)
pipe.unet.to(memory_format=torch.channels_last)
pipe = pipe.to("cpu")
def plex(prompt, neg_prompt, nut):
    apol = []
    # A seed of 0 means "random": draw a seed that is a multiple of 32, matching the slider step.
    if nut == 0:
        nm = random.randrange(32, 2147483617, 32)
    else:
        nm = int(nut)
    generator = torch.Generator(device="cpu").manual_seed(nm)
    image = pipe(prompt=prompt, negative_prompt=neg_prompt, generator=generator, num_inference_steps=15)
    for imze in image.images:
        apol.append(imze)
    return apol
iface = gr.Interface(
    fn=plex,
    inputs=[
        gr.Textbox(label="Prompt"),
        gr.Textbox(label="Negative prompt", value="low quality, bad quality"),
        gr.Slider(label="Manual seed (leave 0 for random)", minimum=0, step=32, maximum=2147483616, value=0),
    ],
    outputs=gr.Gallery(label="Generated Output Image", columns=1),
    description="Running on CPU, very slow! by JoPmt.",
)
iface.queue(max_size=1, api_open=False)
iface.launch(max_threads=1)