Update app.py
app.py CHANGED
@@ -18,8 +18,10 @@ from trellis.pipelines import TrellisImageTo3DPipeline
 from trellis.representations import Gaussian, MeshExtractResult
 from trellis.utils import render_utils, postprocessing_utils
 from diffusers import FluxPipeline
-from transformers import pipeline
 from typing import Tuple, Dict, Any  # Tuple import added
+# Fix the import statements at the top of the file
+import transformers
+from transformers import pipeline as transformers_pipeline
 
 # CUDA memory management settings
 torch.cuda.empty_cache()
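The alias keeps the generic name pipeline from being re-used or shadowed elsewhere in the module; transformers_pipeline is the same factory under an unambiguous name. A minimal sketch of what the renamed import binds, assuming nothing beyond the lines above:

import transformers
from transformers import pipeline as transformers_pipeline

# Both names point at the same factory function; the alias only frees up
# the bare name "pipeline" for other uses in this module.
assert transformers_pipeline is transformers.pipeline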
@@ -328,13 +330,13 @@ if __name__ == "__main__":
 
     try:
         # 3D generation pipeline
-        trellis_pipeline = TrellisImageTo3DPipeline.from_pretrained(
+        trellis_pipeline = TrellisImageTo3DPipeline.from_pretrained(
             "JeffreyXiang/TRELLIS-image-large"
         )
         trellis_pipeline.to(device)
 
         # Image generation pipeline
-        flux_pipe = FluxPipeline.from_pretrained(
+        flux_pipe = FluxPipeline.from_pretrained(
             "black-forest-labs/FLUX.1-dev",
             torch_dtype=torch.bfloat16,
             device_map="balanced"
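For context, a hedged sketch of how the two loaded pipelines are typically driven: the FLUX call uses the standard diffusers text-to-image interface, and the TRELLIS call assumes the run() interface from the upstream TRELLIS README. The prompt, seed, and output handling below are illustrative, not taken from app.py.

# Illustrative only: text -> image with FLUX, then image -> 3D with TRELLIS.
prompt = "a wooden chair, studio lighting"   # hypothetical prompt
image = flux_pipe(
    prompt,
    height=1024,
    width=1024,
    guidance_scale=3.5,
    num_inference_steps=28,
).images[0]

# run() and the "gaussian"/"mesh" output keys follow the TRELLIS README.
outputs = trellis_pipeline.run(image, seed=1)
gaussian_output = outputs["gaussian"][0]   # Gaussian splat representation
mesh_output = outputs["mesh"][0]           # MeshExtractResult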
@@ -350,7 +352,7 @@ if __name__ == "__main__":
         flux_pipe.fuse_lora(lora_scale=0.125)
 
         # Initialize the translator
-        translator = pipeline(
+        translator = transformers_pipeline(  # use transformers_pipeline instead of transformers.pipeline
             "translation",
             model="Helsinki-NLP/opus-mt-ko-en",
             device=device
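The translator presumably turns a Korean prompt into English before it reaches FLUX. A minimal sketch of that hand-off, with a hypothetical prompt and variable names not taken from app.py:

# Illustrative only: translation pipelines return a list of dicts.
korean_prompt = "파란색 세라믹 찻잔"            # hypothetical input ("a blue ceramic teacup")
result = translator(korean_prompt)              # -> [{"translation_text": "..."}]
english_prompt = result[0]["translation_text"]  # this string would then be passed to flux_pipe as the prompt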