import torch
import torch.nn as nn
import torch.nn.functional as F

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

class LSTM(nn.Module):
  def __init__(self, vocab_size, n_classes, hidden_dim, embedding_dim, n_layers, dropout, bidirectional = True):
    super(LSTM, self).__init__()
    
    self.n_layers = n_layers
    self.hidden_dim = hidden_dim
    self.embedding_dim = embedding_dim

    # Embedding and LSTM layers
    self.embedding = nn.Embedding(vocab_size, embedding_dim, device = device)
    self.lstm = nn.LSTM(embedding_dim, hidden_dim, n_layers, dropout = dropout, batch_first = True, bidirectional = bidirectional, device = device)
    
    # Dropout applied to the LSTM output before classification
    self.dropout = nn.Dropout(dropout)
    
    # Final linear (classification) layer; doubled input size when bidirectional
    self.fc = nn.Linear(hidden_dim * 2 if bidirectional else hidden_dim, n_classes, device = device)

  def forward(self, x):
    # x: (batch, seq_len) tensor of token indices
    x = self.embedding(x)     # (batch, seq_len, embedding_dim)
    x, hidden = self.lstm(x)  # (batch, seq_len, hidden_dim * num_directions)

    # Keep only the output at the last timestep of the sequence
    x = x[:, -1, :]
    x = self.dropout(x)

    output = self.fc(x)       # (batch, n_classes)
    return output, hidden
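

# Illustrative usage sketch (not part of the original file). The hyperparameters,
# vocabulary size, and batch shape below are assumptions chosen only to show the
# expected input/output shapes of the model defined above.
if __name__ == '__main__':
  model = LSTM(vocab_size=10000, n_classes=2, hidden_dim=128,
               embedding_dim=100, n_layers=2, dropout=0.3)

  # Dummy batch of 32 sequences of 50 token ids each
  dummy_batch = torch.randint(0, 10000, (32, 50), device=device)

  logits, hidden = model(dummy_batch)
  print(logits.shape)  # expected: torch.Size([32, 2])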