Update app.py
app.py CHANGED
@@ -301,16 +301,12 @@ def _process(img: Image.Image, prompt: str | BoundingBox | None, bg_prompt: str
     new_size = (int(img.width * ratio), int(img.height * ratio))
     img = img.resize(new_size, Image.LANCZOS)
 
-    # CUDA memory management
-
-
-
-
-
-    except Exception as e:
-        print(f"CUDA memory management failed: {e}")
-
-    with torch.cuda.amp.autocast(enabled=torch.cuda.is_available()):
+    # CUDA memory management
+    if torch.cuda.is_available():
+        torch.cuda.empty_cache()
+
+    # Use the new autocast syntax
+    with torch.amp.autocast('cuda', enabled=torch.cuda.is_available()):
         mask, bbox, time_log = _gpu_process(img, prompt)
         masked_alpha = apply_mask(img, mask, defringe=True)
 
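The hunk above clears the CUDA allocator cache before GPU work and swaps the deprecated torch.cuda.amp.autocast context manager for the device-agnostic torch.amp.autocast('cuda', ...) form. A minimal, self-contained sketch of that same pattern is shown below; the run_with_amp helper, model, and batch names are illustrative placeholders, not part of app.py.

import torch

def run_with_amp(model: torch.nn.Module, batch: torch.Tensor) -> torch.Tensor:
    # Release cached allocator blocks before a memory-heavy GPU call.
    if torch.cuda.is_available():
        torch.cuda.empty_cache()

    # New-style autocast: pass the device type explicitly;
    # torch.cuda.amp.autocast(...) is deprecated in recent PyTorch releases.
    with torch.amp.autocast('cuda', enabled=torch.cuda.is_available()):
        with torch.no_grad():
            return model(batch)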