from diffusers import DiffusionPipeline
from diffusers import AutoPipelineForText2Image
from diffusers import StableCascadeDecoderPipeline, StableCascadePriorPipeline
import torch

def load_huggingface_model(model_name, model_type):
    """Load a text-to-image diffusion pipeline by name. `model_type` is currently unused."""
    if model_name == "SD-turbo":
        pipe = AutoPipelineForText2Image.from_pretrained("stabilityai/sd-turbo", torch_dtype=torch.float16, variant="fp16")
        pipe = pipe.to("cuda")
    elif model_name == "SDXL-turbo":
        pipe = AutoPipelineForText2Image.from_pretrained("stabilityai/sdxl-turbo", torch_dtype=torch.float16, variant="fp16")
        pipe = pipe.to("cuda")
    elif model_name == "Stable-cascade":
        # Stable Cascade is a two-stage model: a prior pipeline (bfloat16) and a decoder pipeline (float16).
        prior = StableCascadePriorPipeline.from_pretrained("stabilityai/stable-cascade-prior", variant="bf16", torch_dtype=torch.bfloat16)
        decoder = StableCascadeDecoderPipeline.from_pretrained("stabilityai/stable-cascade", variant="bf16", torch_dtype=torch.float16)
        pipe = [prior, decoder]
    else:
        raise NotImplementedError
    # CPU fallback, kept for reference:
    # if model_name == "SD-turbo":
    #     pipe = AutoPipelineForText2Image.from_pretrained("stabilityai/sd-turbo")
    # elif model_name == "SDXL-turbo":
    #     pipe = AutoPipelineForText2Image.from_pretrained("stabilityai/sdxl-turbo")
    # else:
    #     raise NotImplementedError
    # pipe = pipe.to("cpu")
    return pipe


if __name__ == "__main__":
    # Load each supported turbo pipeline when run as a script (e.g., to pre-download weights).
    for name in ["SD-turbo", "SDXL-turbo"]:
        pipe = load_huggingface_model(name, "text2image")
    # for name in ["IF-I-XL-v1.0"]:
    #     pipe = load_huggingface_model(name, 'text2image')
    #     pipe = DiffusionPipeline.from_pretrained("DeepFloyd/IF-I-XL-v1.0", variant="fp16", torch_dtype=torch.float16)
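
For reference, a minimal usage sketch of a loaded turbo pipeline; the prompt and output path are hypothetical and not part of the original script, but one-step sampling with guidance disabled is the documented way to run sd-turbo / sdxl-turbo.

# Usage sketch (assumption, not part of the original file):
# turbo models are distilled for single-step sampling with classifier-free guidance turned off.
pipe = load_huggingface_model("SDXL-turbo", "text2image")
prompt = "a photo of a red fox in a snowy forest"  # hypothetical prompt
image = pipe(prompt=prompt, num_inference_steps=1, guidance_scale=0.0).images[0]
image.save("sample.png")  # hypothetical output path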