modelling.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 8 21:15:48 2021

@author: karma
"""
import torch
import torch.nn as nn


class LSTMClassifier(nn.Module):
    # Define all the layers used in the model
    def __init__(self, vocab_size, embedding_dim, hidden_dim, num_layers,
                 bidirectional, dropout):
        # Constructor
        super().__init__()

        # Number of neurons in the output layer / number of classes
        output_dim = 4

        # Embedding layer
        self.embedding = nn.Embedding(vocab_size, embedding_dim)

        # LSTM layer
        self.lstm = nn.LSTM(embedding_dim,
                            hidden_dim,
                            num_layers=num_layers,
                            bidirectional=bidirectional,
                            dropout=dropout,
                            batch_first=True)

        # Dense (output) layer: a bidirectional LSTM concatenates the final
        # forward and backward hidden states, so its input size doubles
        self.bidirectional = bidirectional
        if bidirectional:
            self.fc = nn.Linear(hidden_dim * 2, output_dim)
        else:
            self.fc = nn.Linear(hidden_dim, output_dim)

        # Activation function
        self.act = nn.Sigmoid()

    def forward(self, text, text_lengths):
        # text = [batch size, sent_len]
        embedded = self.embedding(text)
        # embedded = [batch size, sent_len, emb dim]

        # Pack the padded sequences so the LSTM skips padding tokens.
        # Note: with the default enforce_sorted=True, the batch must be
        # sorted by length in decreasing order.
        packed_embedded = nn.utils.rnn.pack_padded_sequence(
            embedded,
            text_lengths.cpu(),
            batch_first=True)

        self.lstm.flatten_parameters()
        packed_output, (hidden, cell) = self.lstm(packed_embedded)
        # hidden = [num layers * num directions, batch size, hid dim]
        # cell = [num layers * num directions, batch size, hid dim]

        # Concatenate the final forward and backward hidden states
        if self.bidirectional:
            hidden = torch.cat((hidden[-2, :, :], hidden[-1, :, :]), dim=1)
        else:
            hidden = hidden[-1, :, :]
        # hidden = [batch size, hid dim * num directions]

        dense_outputs = self.fc(hidden)

        # Final activation function
        outputs = self.act(dense_outputs)

        return outputs
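

# A minimal usage sketch (not part of the original file): it shows how the
# classifier might be instantiated and run on a dummy padded batch. The
# hyperparameter values and token ids below are illustrative assumptions;
# the batch is sorted by length in decreasing order because
# pack_padded_sequence is called with its default enforce_sorted=True.
if __name__ == "__main__":
    model = LSTMClassifier(vocab_size=5000,
                           embedding_dim=100,
                           hidden_dim=64,
                           num_layers=2,
                           bidirectional=True,
                           dropout=0.2)

    # Dummy batch: 3 padded sequences of token ids (0 used as padding),
    # with their true lengths given in decreasing order
    text = torch.tensor([[4, 8, 15, 16, 23],
                         [42, 7, 3, 0, 0],
                         [9, 2, 0, 0, 0]])
    text_lengths = torch.tensor([5, 3, 2])

    outputs = model(text, text_lengths)
    print(outputs.shape)  # torch.Size([3, 4]) - one score per class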