Update app.py
app.py CHANGED

@@ -3,7 +3,8 @@ import numpy as np
 import random
 import spaces
 import torch
-from diffusers import
+from diffusers import DiffusionPipeline, FlowMatchEulerDiscreteScheduler
+from transformers import CLIPTextModel, CLIPTokenizer,T5EncoderModel, T5TokenizerFast
 from PIL import Image
 import uuid
 from typing import Tuple

@@ -11,7 +12,7 @@ from typing import Tuple
 dtype = torch.bfloat16
 device = "cuda" if torch.cuda.is_available() else "cpu"

-pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-
+pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=dtype).to(device)

 MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = 2048

@@ -191,7 +192,7 @@ with gr.Blocks(css=css, theme="bethecloud/storj_theme") as demo:
 minimum=1,
 maximum=50,
 step=1,
-value=
+value=28,
 )

 gr.Examples(