Spaces: suisuyy (Runtime error)

suisuyy committed
Commit · 00f1499
Parent(s): 09dab11

fix

Files changed:
- __pycache__/app.cpython-310.pyc +0 -0
- app.py +12 -11
__pycache__/app.cpython-310.pyc CHANGED
Binary files a/__pycache__/app.cpython-310.pyc and b/__pycache__/app.cpython-310.pyc differ
app.py CHANGED
@@ -74,18 +74,19 @@ def update_gpu_status():
 
     except Exception as e:
         print(f"Error getting GPU stats: {e}")
-
-
-
-
-
-
-
+    return torch_update_gpu_status()
+
+def torch_update_gpu_status():
+    if torch.cuda.is_available():
+        gpu_info = torch.cuda.get_device_name(0)
+        gpu_memory = torch.cuda.mem_get_info(0)
+        total_memory = gpu_memory[1] / (1024 * 1024)
+        used_memory = (gpu_memory[1] - gpu_memory[0]) / (1024 * 1024)
 
-
-
-
-
+        gpu_status = f"GPU: {gpu_info}\nTotal Memory: {total_memory:.2f} MB\nUsed Memory: {used_memory:.2f} MB"
+    else:
+        gpu_status = "No GPU available"
+    return gpu_status
 
 def update_cpu_status():
     import datetime
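
For reference, the helper introduced by this commit can be exercised on its own. The sketch below is a minimal standalone version of the same logic, assuming only that PyTorch is installed; the function name and the MB formatting come from the diff, while the __main__ block is added here purely for illustration. torch.cuda.mem_get_info returns a (free_bytes, total_bytes) tuple for the device, so used memory is total minus free.

# Minimal standalone sketch of the GPU-status helper added in this commit.
# On a machine without CUDA it takes the "No GPU available" branch.
import torch


def torch_update_gpu_status():
    if torch.cuda.is_available():
        gpu_info = torch.cuda.get_device_name(0)
        # mem_get_info returns (free_bytes, total_bytes) for device 0
        free_bytes, total_bytes = torch.cuda.mem_get_info(0)
        total_memory = total_bytes / (1024 * 1024)                # MB
        used_memory = (total_bytes - free_bytes) / (1024 * 1024)  # MB
        return (f"GPU: {gpu_info}\n"
                f"Total Memory: {total_memory:.2f} MB\n"
                f"Used Memory: {used_memory:.2f} MB")
    return "No GPU available"


if __name__ == "__main__":
    print(torch_update_gpu_status())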