Spaces: Runtime error
Fix: construct our Hardswish activation
src/digitizer/yolov5/models/common.py CHANGED
@@ -3,6 +3,48 @@ import math
 
 import torch
 import torch.nn as nn
+import torch.nn.functional as F
+
+# Use a local Hardswish instead of torch's nn.Hardswish to avoid the runtime error.
+class Hardswish(nn.Module):
+    r"""Applies the hardswish function, element-wise, as described in the paper:
+
+    `Searching for MobileNetV3`_.
+
+    .. math::
+        \text{Hardswish}(x) = \begin{cases}
+            0 & \text{if~} x \le -3, \\
+            x & \text{if~} x \ge +3, \\
+            x \cdot (x + 3) / 6 & \text{otherwise}
+        \end{cases}
+
+    Args:
+        inplace: can optionally do the operation in-place. Default: ``False``
+
+    Shape:
+        - Input: :math:`(N, *)` where `*` means any number of additional
+          dimensions
+        - Output: :math:`(N, *)`, same shape as the input
+
+    Examples::
+
+        >>> m = Hardswish()
+        >>> input = torch.randn(2)
+        >>> output = m(input)
+
+    .. _`Searching for MobileNetV3`:
+        https://arxiv.org/abs/1905.02244
+    """
+    __constants__ = ['inplace']
+
+    inplace: bool
+
+    def __init__(self, inplace: bool = False) -> None:
+        super(Hardswish, self).__init__()
+        self.inplace = inplace
+
+    def forward(self, input: torch.Tensor) -> torch.Tensor:
+        return F.hardswish(input)
+
+
 def autopad(k, p=None):  # kernel, padding
@@ -23,7 +65,7 @@ class Conv(nn.Module):
         super(Conv, self).__init__()
         self.conv = nn.Conv2d(c1, c2, k, s, autopad(k, p), groups=g, bias=False)
         self.bn = nn.BatchNorm2d(c2)
-        self.act = nn.Hardswish() if act else nn.Identity()
+        self.act = Hardswish() if act else nn.Identity()
 
     def forward(self, x):
         return self.act(self.bn(self.conv(x)))
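
For context, here is a minimal sanity check (not part of the commit) that the drop-in Hardswish behaves exactly like the piecewise definition quoted in its docstring. The helper hardswish_reference and the test values are illustrative assumptions, not code from the repository.

# Sanity check (illustrative, not in the repo): the local Hardswish should
# match the piecewise definition from its docstring.
import torch
import torch.nn as nn
import torch.nn.functional as F

class Hardswish(nn.Module):
    # Same drop-in module as in the diff above: delegate to the functional op.
    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return F.hardswish(x)

def hardswish_reference(x: torch.Tensor) -> torch.Tensor:
    # 0 for x <= -3, x for x >= 3, and x * (x + 3) / 6 in between.
    return torch.where(x <= -3, torch.zeros_like(x),
                       torch.where(x >= 3, x, x * (x + 3) / 6))

x = torch.linspace(-5.0, 5.0, steps=101)
assert torch.allclose(Hardswish()(x), hardswish_reference(x), atol=1e-6)

Because the local module simply delegates to torch.nn.functional.hardswish, it reproduces the numerics of torch's built-in activation while keeping the nn.Hardswish module class itself out of the model, which is what the commit comment points to as the source of the Space's runtime error.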