danifei committed
Commit eb1e18a · verified · 1 Parent(s): 84d2b19

add LayerNorm()

Files changed (1): archs/arch_util.py (+38 -0)
archs/arch_util.py CHANGED
@@ -71,3 +71,41 @@ class ResidualBlock(nn.Module):
        out = F.relu(self.bn(self.conv1(x)), inplace=True)
        out = self.conv2(out)
        return identity + out
+
+class LayerNormFunction(torch.autograd.Function):
+
+    @staticmethod
+    def forward(ctx, x, weight, bias, eps):
+        ctx.eps = eps
+        N, C, H, W = x.size()
+        mu = x.mean(1, keepdim=True)
+        var = (x - mu).pow(2).mean(1, keepdim=True)
+        y = (x - mu) / (var + eps).sqrt()
+        ctx.save_for_backward(y, var, weight)
+        y = weight.view(1, C, 1, 1) * y + bias.view(1, C, 1, 1)
+        return y
+
+    @staticmethod
+    def backward(ctx, grad_output):
+        eps = ctx.eps
+
+        N, C, H, W = grad_output.size()
+        y, var, weight = ctx.saved_tensors
+        g = grad_output * weight.view(1, C, 1, 1)
+        mean_g = g.mean(dim=1, keepdim=True)
+
+        mean_gy = (g * y).mean(dim=1, keepdim=True)
+        gx = 1. / torch.sqrt(var + eps) * (g - y * mean_gy - mean_g)
+        return gx, (grad_output * y).sum(dim=3).sum(dim=2).sum(dim=0), grad_output.sum(dim=3).sum(dim=2).sum(
+            dim=0), None
+
+class LayerNorm2d(nn.Module):
+
+    def __init__(self, channels, eps=1e-6):
+        super(LayerNorm2d, self).__init__()
+        self.register_parameter('weight', nn.Parameter(torch.ones(channels)))
+        self.register_parameter('bias', nn.Parameter(torch.zeros(channels)))
+        self.eps = eps
+
+    def forward(self, x):
+        return LayerNormFunction.apply(x, self.weight, self.bias, self.eps)
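
Not part of the commit, but a quick way to validate the hunk above: LayerNorm2d normalizes each spatial position of an NCHW tensor across its C channels, so its output should match torch.nn.functional.layer_norm applied over the channel axis of a channels-last view, and torch.autograd.gradcheck can exercise the hand-written LayerNormFunction.backward against numerical derivatives. A minimal sketch, assuming the hunk lands in archs/arch_util.py as shown; shapes and tolerances are illustrative.

# Illustrative sanity check -- not part of the commit.
import torch
import torch.nn.functional as F

from archs.arch_util import LayerNorm2d  # module path taken from the diff above

x = torch.randn(2, 8, 5, 5, dtype=torch.float64, requires_grad=True)
ln = LayerNorm2d(8, eps=1e-6).double()

# Forward agreement: normalizing over C at each (h, w) location is the same as
# F.layer_norm over the last axis of a channels-last view.
out = ln(x)
ref = F.layer_norm(x.permute(0, 2, 3, 1), (8,), ln.weight, ln.bias, ln.eps)
assert torch.allclose(out, ref.permute(0, 3, 1, 2))

# Gradient agreement: gradcheck compares the custom backward against numerical
# derivatives (double precision is required for this to pass).
assert torch.autograd.gradcheck(ln, (x,), eps=1e-6, atol=1e-4)
print('LayerNorm2d forward/backward OK')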