gokaygokay committed
Commit 649d25b · verified · 1 Parent(s): 8f9b55f

Update app.py

Files changed (1): app.py (+2 -2)
app.py CHANGED
@@ -72,10 +72,10 @@ else:
     print("CUDA installation not found")
 
 
-device = torch.device('cuda')
+device = 'cuda'
 
 base_model = "black-forest-labs/FLUX.1-dev"
-file_flux = hf_hub_download("marduk191/Flux.1_collection", "flux.1_dev_fp8_fp16t5-marduk191.safetensors")
+file_flux = hf_hub_download("marduk191/Flux.1_collection", "flux.1_dev_8x8_e4m3fn-marduk191.safetensors")
 pipe = FluxPipeline.from_single_file(file_flux, torch_dtype=torch.bfloat16, token=huggingface_token).to(device)
 
 # Load and fuse LoRA BEFORE quantizing
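
For context, a minimal self-contained sketch of the loading path this commit lands on. It assumes a CUDA GPU is available and that the token comes from an HF_TOKEN environment variable; the prompt and inference settings at the end are illustrative and are not part of app.py.

# Sketch of the updated loading path (assumptions: CUDA GPU available,
# HF_TOKEN set in the environment; the generation call below is illustrative).
import os
import torch
from huggingface_hub import hf_hub_download
from diffusers import FluxPipeline

huggingface_token = os.environ.get("HF_TOKEN")  # assumed source of the token

# A plain string works with .to(); torch.device('cuda') behaves the same.
device = 'cuda'

base_model = "black-forest-labs/FLUX.1-dev"
file_flux = hf_hub_download("marduk191/Flux.1_collection",
                            "flux.1_dev_8x8_e4m3fn-marduk191.safetensors")
pipe = FluxPipeline.from_single_file(file_flux, torch_dtype=torch.bfloat16,
                                     token=huggingface_token).to(device)

# Illustrative generation call to confirm the checkpoint loads and runs.
image = pipe("a scenic mountain lake at sunrise",
             num_inference_steps=28, guidance_scale=3.5).images[0]
image.save("output.png")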