import torch

__all__ = ["GradScaler"]


class GradScaler(torch.amp.GradScaler):
    r"""

    See :class:`torch.amp.GradScaler`.

    ``torch.cpu.amp.GradScaler(args...)`` is equivalent to ``torch.amp.GradScaler("cpu", args...)``.

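    A minimal usage sketch follows (the tiny linear model, SGD optimizer, and
    random data are illustrative placeholders, not part of this module)::

        model = torch.nn.Linear(8, 1)
        optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
        scaler = torch.cpu.amp.GradScaler()

        inputs = torch.randn(4, 8)
        targets = torch.randn(4, 1)

        optimizer.zero_grad()
        # Run the forward pass under CPU autocast so eligible ops use lower precision.
        with torch.autocast(device_type="cpu"):
            loss = torch.nn.functional.mse_loss(model(inputs), targets)
        scaler.scale(loss).backward()  # backward() on the scaled loss
        scaler.step(optimizer)         # unscales gradients, then calls optimizer.step()
        scaler.update()                # adjusts the scale factor for the next iteration
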
    """

    def __init__(
        self,
        init_scale: float = 2.0**16,
        growth_factor: float = 2.0,
        backoff_factor: float = 0.5,
        growth_interval: int = 2000,
        enabled: bool = True,
    ) -> None:
        super().__init__(
            "cpu",
            init_scale=init_scale,
            growth_factor=growth_factor,
            backoff_factor=backoff_factor,
            growth_interval=growth_interval,
            enabled=enabled,
        )