# Copyright (c) 2025 Ye Liu. Licensed under the BSD-3-Clause License.
import math

import torch
import torch.nn as nn
import torch.nn.functional as F
from nncore.nn import Parameter

class Permute(nn.Module):
    """Swap the last two dimensions, e.g. (B, T, C) <-> (B, C, T), for Conv1d."""

    def forward(self, x):
        return x.transpose(-1, -2)

class LearnableEmbedding(nn.Module):
    """Learnable embedding broadcast over the batch and sequence dimensions."""

    def __init__(self, dims):
        super().__init__()
        self.weights = Parameter(1, 1, dims)

    def forward(self, x):
        return x + self.weights.expand_as(x)

class ConvPyramid(nn.Module):
    """Multi-scale temporal pyramid: strides > 1 downsample via strided Conv1d,
    strides < 1 upsample via ConvTranspose1d, and stride 1 only applies the
    activation."""

    def __init__(self, dims, strides, act_cls=nn.ReLU):
        super().__init__()

        self.blocks = nn.ModuleList()

        for s in strides:
            # Number of 2x scaling steps; negative for fractional strides.
            p = int(math.log2(s))
            if p == 0:
                layers = act_cls()
            else:
                conv_cls = nn.Conv1d if p > 0 else nn.ConvTranspose1d
                layers = nn.Sequential()
                for _ in range(abs(p)):
                    module = [
                        Permute(),
                        conv_cls(dims, dims, 2, stride=2),
                        Permute(),
                        nn.LayerNorm(dims),
                        act_cls()
                    ]
                    layers.extend(module)
            self.blocks.append(layers)

        self.strides = strides

    def forward(self, x, mask, return_mask=False):
        pymid, pymid_msk = [], []

        for s, blk in zip(self.strides, self.blocks):
            # Skip pyramid levels whose stride exceeds the sequence length.
            if x.size(1) < s:
                continue

            pymid.append(blk(x))

            if return_mask:
                if s > 1:
                    # Downsampling: a pooled position stays valid if any frame
                    # inside its window is valid.
                    msk = F.max_pool1d(mask.float(), s, stride=s).long()
                elif s < 1:
                    # Upsampling: repeat each mask entry to match the new length.
                    msk = mask.repeat_interleave(int(1 / s), dim=1)
                else:
                    msk = mask
                pymid_msk.append(msk)

        return (pymid, pymid_msk) if return_mask else pymid

class Scale(nn.Module):
    """One learnable scaling factor per pyramid level, indexed by level id."""

    def __init__(self, strides):
        super().__init__()
        self.scale = nn.Parameter(torch.ones(len(strides)))

    def forward(self, x, i):
        return x * self.scale[i]

class ConvHead(nn.Module):
    """Two-layer Conv1d prediction head applied along the temporal axis."""

    def __init__(self, dims, out_dims, kernel_size=3, act_cls=nn.ReLU):
        super().__init__()

        # yapf:disable
        self.module = nn.Sequential(
            Permute(),
            nn.Conv1d(dims, dims, kernel_size, padding=kernel_size // 2),
            act_cls(),
            nn.Conv1d(dims, out_dims, kernel_size, padding=kernel_size // 2),
            Permute())
        # yapf:enable

    def forward(self, x):
        return self.module(x)
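

# ---------------------------------------------------------------------------
# Minimal usage sketch (not part of the original file): builds a small pyramid
# on random features and prints the resulting shapes. The batch size, sequence
# length, feature dims, strides, and mask layout below are illustrative
# assumptions, not values taken from the upstream project.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    b, t, dims, strides = 2, 32, 256, (1, 2, 4)

    x = torch.randn(b, t, dims)
    mask = torch.ones(b, t, dtype=torch.long)

    pyramid = ConvPyramid(dims, strides)
    scale = Scale(strides)
    head = ConvHead(dims, out_dims=2)

    feats, masks = pyramid(x, mask, return_mask=True)
    for i, (s, f, m) in enumerate(zip(strides, feats, masks)):
        out = scale(head(f), i)
        print(f'stride {s}: feats {tuple(f.size())}, '
              f'mask {tuple(m.size())}, head {tuple(out.size())}')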