AlexK-PL committed on
Commit
689b3d3
·
1 Parent(s): 282a36c

Delete loss_function.py

Browse files
Files changed (1) hide show
  1. loss_function.py +0 -25
loss_function.py DELETED
@@ -1,25 +0,0 @@
1
- from torch import nn
2
-
3
-
4
class Tacotron2Loss(nn.Module):
    """Combined Tacotron 2 training loss.

    Sums an MSE reconstruction loss over the decoder and post-net mel
    outputs with a BCE-with-logits loss over the stop-gate predictions.
    """

    def __init__(self):
        super(Tacotron2Loss, self).__init__()
        # Build the criterion modules once here instead of re-instantiating
        # them on every forward pass (the original rebuilt them per call).
        self.mse = nn.MSELoss()
        # BCEWithLogitsLoss fuses the sigmoid with BCE, using the
        # log-sum-exp trick for numerical stability.
        self.bce = nn.BCEWithLogitsLoss()

    def forward(self, model_output, targets):
        """Compute the total loss.

        Args:
            model_output: tuple of (mel_out, mel_out_postnet, gate_out, _, _)
                as produced by the Tacotron 2 model; gate_out holds raw logits.
            targets: tuple/list where targets[0] is the mel-spectrogram target
                and targets[1] the stop-gate target (0 until the end of the
                sequence, then 1 — TODO confirm against the data loader).

        Returns:
            Scalar tensor: mel MSE (decoder + post-net) plus gate BCE.
        """
        mel_target, gate_target = targets[0], targets[1]
        # Targets are constants; make sure no gradient flows into them.
        mel_target.requires_grad = False
        gate_target.requires_grad = False
        # Ensures dimension 1 will be size 1, the rest adapted: a column of
        # zeroes up to the end of the current sequence, then filled with 1's.
        gate_target = gate_target.view(-1, 1)

        mel_out, mel_out_postnet, gate_out, _, _ = model_output
        gate_out = gate_out.view(-1, 1)
        # Mean Square Error (L2) loss for decoder generation + post-net generation.
        mel_loss = self.mse(mel_out, mel_target) + \
            self.mse(mel_out_postnet, mel_target)
        gate_loss = self.bce(gate_out, gate_target)
        return mel_loss + gate_loss