Spaces:
Paused
Paused
Delete CHATTS/utils/gpu_utils.py
Browse files- CHATTS/utils/gpu_utils.py +0 -23
CHATTS/utils/gpu_utils.py
DELETED
|
@@ -1,23 +0,0 @@
|
|
| 1 |
-
|
| 2 |
-
import torch
|
| 3 |
-
import logging
|
| 4 |
-
|
| 5 |
-
def select_device(min_memory=2048):
    """Select the CUDA device with the most free memory, falling back to CPU.

    Args:
        min_memory: Minimum free memory (in MB) the best GPU must have;
            if the best GPU has less, the CPU is used instead.

    Returns:
        torch.device: ``cuda:<i>`` for the chosen GPU, or ``cpu``.
    """
    logger = logging.getLogger(__name__)

    # Guard clause: no CUDA at all -> CPU.
    if not torch.cuda.is_available():
        logger.warning('No GPU found, use CPU instead')
        return torch.device('cpu')

    # Estimate free memory per GPU as total minus what this process has
    # reserved via the PyTorch caching allocator.
    # NOTE(review): this ignores memory held by other processes — confirm
    # whether torch.cuda.mem_get_info would be the intended measure.
    available_gpus = []
    for i in range(torch.cuda.device_count()):
        props = torch.cuda.get_device_properties(i)
        free_memory = props.total_memory - torch.cuda.memory_reserved(i)
        available_gpus.append((i, free_memory))

    selected_gpu, max_free_memory = max(available_gpus, key=lambda x: x[1])
    free_memory_mb = max_free_memory / (1024 * 1024)

    # Best GPU is below the threshold -> warn and fall back to CPU.
    if free_memory_mb < min_memory:
        # Lazy %-style args so formatting only happens if the record is emitted.
        logger.warning('GPU %d has %s MB memory left.',
                       selected_gpu, round(free_memory_mb, 2))
        return torch.device('cpu')

    return torch.device(f'cuda:{selected_gpu}')
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|