from .utils import (
    is_torch_available,
    is_transformers_available,
)


__version__ = "0.9.0"


# Only expose the real pipeline when both optional backends are installed;
# otherwise fall back to dummy placeholder objects so the import itself never fails.
if is_torch_available() and is_transformers_available():
    from .stable_diffusion import (
        StableDiffusionPipeline,
    )
else:
    from .utils.dummy_torch_and_transformers_objects import *  # noqa F403
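The `else` branch keeps `from diffusers import StableDiffusionPipeline` working even when `torch` or `transformers` is missing; the error is deferred until the placeholder is actually used. A minimal sketch of the idea (the class body below is illustrative, not the library's actual dummy implementation):

```python
# Hypothetical dummy placeholder, mirroring the fallback pattern above:
# importing the name succeeds, but instantiating it raises a helpful error
# instead of an ImportError at package-import time.
class StableDiffusionPipeline:
    def __init__(self, *args, **kwargs):
        raise ImportError(
            "StableDiffusionPipeline requires the `torch` and `transformers` "
            "libraries. Install them with `pip install torch transformers`."
        )
```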