File size: 211 Bytes
c61ccee
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
from importlib.util import find_spec

import torch

__all__ = ["amp_definitely_not_available"]


def amp_definitely_not_available():
    """Report whether automatic mixed precision is certainly unusable.

    Returns ``True`` only when neither a CUDA device nor the
    ``torch_xla`` package can be found, i.e. no AMP-capable backend
    is present in this environment.
    """
    cuda_present = torch.cuda.is_available()
    xla_present = find_spec("torch_xla") is not None
    return not cuda_present and not xla_present