openfree commited on
Commit
c699db9
·
verified ·
1 Parent(s): 7cea496

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +36 -4
app.py CHANGED
@@ -71,25 +71,57 @@ def translate_to_english(text: str) -> str:
71
  print(f"Translation error: {str(e)}")
72
  return text
73
 
74
- # Eric cat LoRA 가중치 로드
75
  try:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
76
  lora_path = hf_hub_download(
77
- "openfree/myt-flux-fantasy",
78
- "myt-flux-fantasy.safetensors",
79
  use_auth_token=HF_TOKEN
80
  )
 
 
 
81
  pipe.load_lora_weights(lora_path)
82
  pipe.fuse_lora(lora_scale=0.125)
 
 
83
  except Exception as e:
84
  print(f"Error loading LoRA weights: {str(e)}")
 
85
  raise ValueError("Failed to load LoRA weights. Please check your HF_TOKEN and model access.")
86
 
87
- # GPU로 이동
88
  if torch.cuda.is_available():
89
  try:
 
90
  pipe = pipe.to("cuda:0")
 
 
91
  except Exception as e:
92
  print(f"Warning: Could not move pipeline to CUDA: {str(e)}")
 
 
93
 
94
  # 저장 디렉토리 설정
95
  SAVE_DIR = "saved_images"
 
71
  print(f"Translation error: {str(e)}")
72
  return text
73
 
74
+ print("Initializing FLUX pipeline...")
75
  try:
76
+ pipe = FluxPipeline.from_pretrained(
77
+ "black-forest-labs/FLUX.1-dev",
78
+ torch_dtype=torch.float16,
79
+ use_auth_token=HF_TOKEN,
80
+ safety_checker=None # 안전성 검사 비활성화
81
+ )
82
+ print("FLUX pipeline initialized successfully")
83
+
84
+ # 메모리 최적화 설정
85
+ pipe.enable_attention_slicing(slice_size="auto")
86
+ pipe.enable_model_cpu_offload() # CPU 오프로딩 활성화
87
+ print("Pipeline optimization settings applied")
88
+
89
+ except Exception as e:
90
+ print(f"Error initializing FLUX pipeline: {str(e)}")
91
+ raise
92
+
93
+ # LoRA 가중치 로드 부분 수정
94
+ print("Loading LoRA weights...")
95
+ try:
96
+ # LoRA ํŒŒ์ผ ๊ฒฝ๋กœ ํ™•์ธ
97
  lora_path = hf_hub_download(
98
+ repo_id="openfree/myt-flux-fantasy",
99
+ filename="myt-flux-fantasy.safetensors", # 정확한 파일명 확인 필요
100
  use_auth_token=HF_TOKEN
101
  )
102
+ print(f"LoRA weights downloaded to: {lora_path}")
103
+
104
+ # LoRA 가중치 로드
105
  pipe.load_lora_weights(lora_path)
106
  pipe.fuse_lora(lora_scale=0.125)
107
+ print("LoRA weights loaded and fused successfully")
108
+
109
  except Exception as e:
110
  print(f"Error loading LoRA weights: {str(e)}")
111
+ print(f"Full error details: {repr(e)}")
112
  raise ValueError("Failed to load LoRA weights. Please check your HF_TOKEN and model access.")
113
 
114
+ # GPU ์ด๋™ ๋ถ€๋ถ„ ์ˆ˜์ •
115
  if torch.cuda.is_available():
116
  try:
117
+ print("Moving pipeline to GPU...")
118
  pipe = pipe.to("cuda:0")
119
+ print("Pipeline successfully moved to GPU")
120
+ print(f"Current device: {pipe.device}")
121
  except Exception as e:
122
  print(f"Warning: Could not move pipeline to CUDA: {str(e)}")
123
+ print("Falling back to CPU")
124
+
125
 
126
  # 저장 디렉토리 설정
127
  SAVE_DIR = "saved_images"