Update app.py
app.py (CHANGED)
@@ -23,6 +23,7 @@ from transformers import (
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
 model_repo_id = "Alpha-VLLM/Lumina-Image-2.0"
+transformer_repo_id = "benjamin-paine/Lumina-Image-2.0"  # Temporarily fixed, change when main repo gets updated
 
 if torch.cuda.is_available():
     torch_dtype = torch.bfloat16
@@ -30,10 +31,9 @@ else:
     torch_dtype = torch.float32
 
 ###
-
+transformer = Lumina2Transformer2DModel.from_pretrained(transformer_repo_id, subfolder="transformer")
 vae = AutoencoderKL.from_pretrained(model_repo_id, subfolder="vae")
 text_encoder = Gemma2Model.from_pretrained(model_repo_id, subfolder="text_encoder")
-transformer = Lumina2Transformer2DModel.from_pretrained(model_repo_id, subfolder="transformer")
 tokenizer = GemmaTokenizer.from_pretrained(model_repo_id, subfolder="tokenizer")
 scheduler = FlowMatchEulerDiscreteScheduler.from_pretrained(model_repo_id, subfolder="scheduler")
 
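For context, the change only swaps the repo the transformer weights come from; the other components still load from the main Alpha-VLLM repo. Below is a minimal sketch of how these per-component loads could be assembled and run, assuming diffusers' Lumina2Pipeline is used downstream in app.py (the pipeline assembly, prompt, image size, and step count here are illustrative assumptions, not taken from the Space's actual code):

import torch
from diffusers import (
    AutoencoderKL,
    FlowMatchEulerDiscreteScheduler,
    Lumina2Pipeline,
    Lumina2Transformer2DModel,
)
from transformers import Gemma2Model, GemmaTokenizer

model_repo_id = "Alpha-VLLM/Lumina-Image-2.0"
# Temporary mirror for the transformer weights, as in the diff above
transformer_repo_id = "benjamin-paine/Lumina-Image-2.0"

device = "cuda" if torch.cuda.is_available() else "cpu"
torch_dtype = torch.bfloat16 if torch.cuda.is_available() else torch.float32

# Load each component separately so only the transformer comes from the mirror repo
transformer = Lumina2Transformer2DModel.from_pretrained(
    transformer_repo_id, subfolder="transformer", torch_dtype=torch_dtype
)
vae = AutoencoderKL.from_pretrained(model_repo_id, subfolder="vae", torch_dtype=torch_dtype)
text_encoder = Gemma2Model.from_pretrained(
    model_repo_id, subfolder="text_encoder", torch_dtype=torch_dtype
)
tokenizer = GemmaTokenizer.from_pretrained(model_repo_id, subfolder="tokenizer")
scheduler = FlowMatchEulerDiscreteScheduler.from_pretrained(model_repo_id, subfolder="scheduler")

# Assemble the pipeline from the individually loaded components (hypothetical usage)
pipe = Lumina2Pipeline(
    transformer=transformer,
    vae=vae,
    text_encoder=text_encoder,
    tokenizer=tokenizer,
    scheduler=scheduler,
).to(device)

image = pipe(
    "a photo of a red panda",  # illustrative prompt
    height=1024,
    width=1024,
    num_inference_steps=30,
    guidance_scale=4.0,
).images[0]
image.save("output.png")

Once the main Alpha-VLLM repo is updated, transformer_repo_id can be dropped and the transformer loaded from model_repo_id again, as the inline comment in the diff notes.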