doevent committed (verified)
Commit bcf9dbd · 1 Parent(s): 390d3b9

Update app.py

Files changed (1)
  1. app.py +4 -2
app.py CHANGED
@@ -6,7 +6,9 @@ import torch
 from diffusers import DiffusionPipeline, FlowMatchEulerDiscreteScheduler, FluxTransformer2DModel, FluxPipeline
 from transformers import CLIPTextModel, CLIPTokenizer,T5EncoderModel, T5TokenizerFast
 from huggingface_hub import hf_hub_download
+import os
 
+token_hf = os.environ["HF_TOKEN"]
 dtype = torch.bfloat16
 device = "cuda" if torch.cuda.is_available() else "cpu"
 
@@ -14,10 +16,10 @@ repo_name = "ByteDance/Hyper-SD"
 ckpt_name = "Hyper-FLUX.1-dev-8steps-lora.safetensors"
 hyper_lora = hf_hub_download(repo_name, ckpt_name)
 
-pipe = FluxPipeline.from_pretrained(base_model_id, token="xxx")
+pipe = FluxPipeline.from_pretrained(base_model_id, token=token_hf)
 pipe.load_lora_weights(hf_hub_download(repo_name, ckpt_name))
 pipe.fuse_lora(lora_scale=0.125)
-pipe.to("cuda", dtype=torch.float16)
+pipe.to("cuda", dtype=dtype)
 
 
 # pipe = FluxPipeline.from_pretrained("sayakpaul/FLUX.1-merged", torch_dtype=torch.bfloat16).to(device)
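The commit replaces the hardcoded access token and the float16 cast with an HF_TOKEN environment variable and the shared bfloat16 dtype. Below is a minimal sketch of the resulting load path after this change. The real base_model_id is defined outside these hunks and is not visible here, so the "black-forest-labs/FLUX.1-dev" value, the torch_dtype argument, and the os.environ.get fallback are illustrative assumptions, not part of the commit.

import os

import torch
from diffusers import FluxPipeline
from huggingface_hub import hf_hub_download

dtype = torch.bfloat16
device = "cuda" if torch.cuda.is_available() else "cpu"

# Assumption: base_model_id is set elsewhere in app.py and is not shown in the
# diff; FLUX.1-dev is a plausible stand-in used only for this sketch.
base_model_id = "black-forest-labs/FLUX.1-dev"
repo_name = "ByteDance/Hyper-SD"
ckpt_name = "Hyper-FLUX.1-dev-8steps-lora.safetensors"

# Read the token for the gated repo from the environment instead of hardcoding it.
# The commit uses os.environ["HF_TOKEN"]; .get() is an assumption that returns
# None rather than raising KeyError when the variable is unset.
token_hf = os.environ.get("HF_TOKEN")

pipe = FluxPipeline.from_pretrained(base_model_id, torch_dtype=dtype, token=token_hf)
pipe.load_lora_weights(hf_hub_download(repo_name, ckpt_name))  # Hyper-SD 8-step LoRA
pipe.fuse_lora(lora_scale=0.125)
pipe.to(device, dtype=dtype)  # keep the runtime dtype consistent with bfloat16

Using the shared dtype variable in the final .to() call avoids the old behavior, where weights prepared for bfloat16 were silently downcast to float16 at device-placement time.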