aiqtech committed
Commit db81a6b · verified · Parent(s): ef607fe

Update app.py

Files changed (1):
  app.py +5 -3
app.py CHANGED
@@ -19,10 +19,11 @@ from trellis.representations import Gaussian, MeshExtractResult
 from trellis.utils import render_utils, postprocessing_utils
 from diffusers import FluxPipeline
 from typing import Tuple, Dict, Any  # add Tuple import
+
 # fix the import statements at the top of the file
 import transformers
 from transformers import pipeline as transformers_pipeline
-
+from transformers import Pipeline  # added for the Pipeline type hint
 # CUDA memory-management settings
 torch.cuda.empty_cache()
 torch.backends.cuda.matmul.allow_tf32 = True
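
The new Pipeline import exists only to support type annotations. A minimal sketch of how it can be used, assuming a helper like the following (the function name and signature are illustrative, not from app.py):

    from transformers import Pipeline, pipeline as transformers_pipeline

    def translate_to_english(translator: Pipeline, text: str) -> str:
        # A "translation" pipeline returns a list like [{"translation_text": "..."}].
        return translator(text)[0]["translation_text"]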
@@ -176,7 +177,7 @@ def text_to_image(prompt: str, height: int, width: int, steps: int, scales: floa
 
     with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
         try:
-            generated_image = pipe(
+            generated_image = flux_pipe(  # changed pipe to flux_pipe
                 prompt=[formatted_prompt],
                 generator=torch.Generator().manual_seed(int(seed)),
                 num_inference_steps=int(steps),
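
This hunk fixes a NameError: the function referenced pipe, but the pipeline built in __main__ is named flux_pipe (see the fuse_lora call in the next hunk). A minimal sketch of the surrounding call pattern, assuming flux_pipe is a diffusers FluxPipeline; the checkpoint id and parameter values below are assumptions, not taken from app.py:

    import torch
    from diffusers import FluxPipeline

    # Illustrative load; the actual checkpoint and LoRA setup in app.py may differ.
    flux_pipe = FluxPipeline.from_pretrained(
        "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
    ).to("cuda")

    with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
        generated_image = flux_pipe(
            prompt=["a watercolor fox"],
            generator=torch.Generator().manual_seed(42),
            num_inference_steps=28,
            guidance_scale=3.5,
            height=1024,
            width=1024,
        ).images[0]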
@@ -352,7 +353,8 @@ if __name__ == "__main__":
     flux_pipe.fuse_lora(lora_scale=0.125)
 
     # initialize the translator
-    translator = transformers_pipeline(  # use transformers_pipeline instead of transformers.pipeline
+    global translator
+    translator = transformers_pipeline(
         "translation",
         model="Helsinki-NLP/opus-mt-ko-en",
         device=device
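
For reference, a minimal sketch of how the translator initialized here is typically used; only the task name, model id, and keyword arguments come from the diff, the rest is assumed:

    from transformers import pipeline as transformers_pipeline

    translator = transformers_pipeline(
        "translation",
        model="Helsinki-NLP/opus-mt-ko-en",
        device=0,  # the diff passes a device variable; 0 (first GPU) is assumed here
    )

    # The pipeline returns a list of dicts with a "translation_text" key.
    result = translator("안녕하세요")  # Korean input, e.g. "Hello"
    print(result[0]["translation_text"])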
 