import torch
import torch.nn as nn
from modules.cond import cast


class GEGLU(nn.Module):
    """#### Class representing the GEGLU activation function.

    GEGLU is a GLU (gated linear unit) variant that uses GELU as its gate:
    the input is projected to twice the output dimension, split into a value
    and a gate, and the value is multiplied element-wise by GELU(gate).

    #### Args:
        - `dim_in` (int): The input dimension.
        - `dim_out` (int): The output dimension.
    """

    def __init__(self, dim_in: int, dim_out: int):
        super().__init__()
        # Project to 2 * dim_out so the output can be split into value and gate.
        self.proj = cast.manual_cast.Linear(dim_in, dim_out * 2)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """#### Forward pass for the GEGLU activation function.

        #### Args:
            - `x` (torch.Tensor): The input tensor.

        #### Returns:
            - `torch.Tensor`: The output tensor.
        """
        # Split the projection into the value and its gate along the last dim.
        x, gate = self.proj(x).chunk(2, dim=-1)
        # Gate the value with GELU: GEGLU(x) = value * GELU(gate).
        return x * torch.nn.functional.gelu(gate)
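

# A minimal usage sketch (illustrative only; the dimensions below are
# arbitrary example values, not values from this codebase):
#
#     geglu = GEGLU(dim_in=512, dim_out=2048)
#     x = torch.randn(2, 77, 512)   # (batch, sequence, dim_in)
#     out = geglu(x)                # -> shape (2, 77, 2048)
#
# GEGLU is commonly used as the first stage of a transformer feed-forward
# block, in place of a plain Linear + GELU.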