revert commit
app.py
CHANGED
@@ -229,7 +229,31 @@ def gpu_memory_manager():
         torch.cuda.empty_cache()
         gc.collect()
 
-@spaces.GPU()
+def cuda_error_handler(func):
+    def wrapper(*args, **kwargs):
+        try:
+            return func(*args, **kwargs)
+        except RuntimeError as e:
+            if "CUDA" in str(e):
+                print(f"CUDA error occurred: {str(e)}")
+                print("Attempting to recover...")
+                torch.cuda.empty_cache()
+                gc.collect()
+                try:
+                    return func(*args, **kwargs)
+                except Exception as e2:
+                    print(f"Recovery failed. Error: {str(e2)}")
+                    return f"An error occurred: {str(e2)}", 0, 0
+            else:
+                raise
+        except Exception as e:
+            print(f"An unexpected error occurred: {str(e)}")
+            traceback.print_exc()
+            return f"An unexpected error occurred: {str(e)}", 0, 0
+    return wrapper
+
+@spaces.GPU(duration=120)
+@cuda_error_handler
 def generate_text(image, prompt, max_tokens):
     try:
         with gpu_memory_manager():
@@ -264,7 +288,8 @@ def generate_text(image, prompt, max_tokens):
         traceback.print_exc()
         return f"Error: {str(e)}", 0, 0
 
-@spaces.GPU()
+@spaces.GPU(duration=60)
+@cuda_error_handler
 def calculate_similarity(image1, image2):
     try:
         with gpu_memory_manager():