Update app.py
app.py CHANGED
@@ -72,10 +72,10 @@ else:
     print("CUDA installation not found")


-device =
+device = 'cuda'

 base_model = "black-forest-labs/FLUX.1-dev"
-file_flux = hf_hub_download("marduk191/Flux.1_collection", "flux.
+file_flux = hf_hub_download("marduk191/Flux.1_collection", "flux.1_dev_8x8_e4m3fn-marduk191.safetensors")
 pipe = FluxPipeline.from_single_file(file_flux, torch_dtype=torch.bfloat16, token=huggingface_token).to(device)

 # Load and fuse LoRA BEFORE quantizing
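For context, the updated lines amount to the following flow. This is a minimal sketch, not the Space's full app.py: it assumes the Hugging Face token is provided via an HF_TOKEN secret and uses a placeholder LoRA repo id, since the actual LoRA choice and the quantization step sit outside this hunk.

import os

import torch
from diffusers import FluxPipeline
from huggingface_hub import hf_hub_download

# Assumption: the Space exposes its Hugging Face token as the HF_TOKEN secret.
huggingface_token = os.environ.get("HF_TOKEN")

device = 'cuda'

# Defined in the diff; presumably referenced elsewhere in app.py.
base_model = "black-forest-labs/FLUX.1-dev"

# Download the single-file FP8 (e4m3fn) checkpoint and build the pipeline
# from it, upcasting to bfloat16 as in the diff.
file_flux = hf_hub_download(
    "marduk191/Flux.1_collection",
    "flux.1_dev_8x8_e4m3fn-marduk191.safetensors",
)
pipe = FluxPipeline.from_single_file(
    file_flux, torch_dtype=torch.bfloat16, token=huggingface_token
).to(device)

# Load and fuse the LoRA BEFORE any quantization step, as the comment in the
# hunk indicates; "some-user/some-flux-lora" is a hypothetical repo id
# standing in for whatever the app actually uses.
pipe.load_lora_weights("some-user/some-flux-lora")
pipe.fuse_lora()
pipe.unload_lora_weights()  # drop the unfused adapter weights once merged

Fusing the LoRA into the base weights first means a later quantization pass sees a single set of merged weights, rather than trying to apply a bf16 adapter on top of already-quantized layers.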