ginipick committed on
Commit
7daf15f
·
verified ·
1 Parent(s): 105afff

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -3
app.py CHANGED
@@ -296,10 +296,10 @@ def combine_with_background(foreground: Image.Image, background: Image.Image,
296
  result.paste(scaled_foreground, (x, y), scaled_foreground)
297
  return result
298
 
299
- @spaces.GPU(duration=30)
300
  def _gpu_process(img: Image.Image, prompt: str | BoundingBox | None) -> tuple[Image.Image, BoundingBox | None, list[str]]:
301
  try:
302
- with torch.inference_mode():
303
  if isinstance(prompt, str):
304
  bbox = gd_detect(img, prompt)
305
  if not bbox:
@@ -309,10 +309,11 @@ def _gpu_process(img: Image.Image, prompt: str | BoundingBox | None) -> tuple[Im
309
 
310
  mask = segmenter(img, bbox)
311
  return mask, bbox, []
312
-
313
  except Exception as e:
314
  print(f"GPU process error: {str(e)}")
315
  raise
 
 
316
 
317
  def _process(img: Image.Image, prompt: str | BoundingBox | None, bg_prompt: str | None = None, aspect_ratio: str = "1:1") -> tuple[tuple[Image.Image, Image.Image, Image.Image], gr.DownloadButton]:
318
  try:
 
296
  result.paste(scaled_foreground, (x, y), scaled_foreground)
297
  return result
298
 
299
+ @spaces.GPU(duration=20) # 30초에서 20초로 감소 (reduced from 30 to 20 seconds)
300
  def _gpu_process(img: Image.Image, prompt: str | BoundingBox | None) -> tuple[Image.Image, BoundingBox | None, list[str]]:
301
  try:
302
+ with torch.inference_mode(), torch.amp.autocast('cuda', enabled=torch.cuda.is_available()):
303
  if isinstance(prompt, str):
304
  bbox = gd_detect(img, prompt)
305
  if not bbox:
 
309
 
310
  mask = segmenter(img, bbox)
311
  return mask, bbox, []
 
312
  except Exception as e:
313
  print(f"GPU process error: {str(e)}")
314
  raise
315
+ finally:
316
+ clear_memory()
317
 
318
  def _process(img: Image.Image, prompt: str | BoundingBox | None, bg_prompt: str | None = None, aspect_ratio: str = "1:1") -> tuple[tuple[Image.Image, Image.Image, Image.Image], gr.DownloadButton]:
319
  try: