# @Author : bamtercelboo
# @Datetime : 2018/07/19 22:35
# @File : model_BiLSTM.py
# @Last Modify Time : 2018/07/19 22:35
# @Contact : bamtercelboo@{gmail.com, 163.com}
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
import random
from DataUtils.Common import seed_num
torch.manual_seed(seed_num)
random.seed(seed_num)
"""
Neural Networks model : Bidirection LSTM
"""
class BiLSTM(nn.Module):
    """Bidirectional LSTM encoder with max-pooling over time and a two-layer classifier head."""

    def __init__(self, args):
        super(BiLSTM, self).__init__()
        self.args = args
        self.hidden_dim = args.lstm_hidden_dim
        self.num_layers = args.lstm_num_layers
        V = args.embed_num   # vocabulary size
        D = args.embed_dim   # embedding dimension
        C = args.class_num   # number of output classes
        # self.embed = nn.Embedding(V, D, max_norm=config.max_norm)
        self.embed = nn.Embedding(V, D, padding_idx=args.paddingId)
        # copy pretrained embedding weights if provided
        if args.word_Embedding:
            self.embed.weight.data.copy_(args.pretrained_weight)
        # each direction gets hidden_dim // 2 units, so the concatenated output size is hidden_dim
        self.bilstm = nn.LSTM(D, self.hidden_dim // 2, num_layers=1, dropout=args.dropout, bidirectional=True, bias=False)
        print(self.bilstm)
        self.hidden2label1 = nn.Linear(self.hidden_dim, self.hidden_dim // 2)
        self.hidden2label2 = nn.Linear(self.hidden_dim // 2, C)
        # self.dropout = nn.Dropout(config.dropout)
    def forward(self, x):
        # x: (seq_len, batch_size) token indices
        embed = self.embed(x)                             # (seq_len, batch_size, D)
        x = embed.view(len(x), embed.size(1), -1)
        bilstm_out, _ = self.bilstm(x)                    # (seq_len, batch_size, hidden_dim)
        bilstm_out = torch.transpose(bilstm_out, 0, 1)    # (batch_size, seq_len, hidden_dim)
        bilstm_out = torch.transpose(bilstm_out, 1, 2)    # (batch_size, hidden_dim, seq_len)
        bilstm_out = torch.tanh(bilstm_out)               # F.tanh is deprecated; torch.tanh is equivalent
        # max-pooling over the time dimension -> (batch_size, hidden_dim)
        bilstm_out = F.max_pool1d(bilstm_out, bilstm_out.size(2)).squeeze(2)
        y = self.hidden2label1(bilstm_out)
        y = self.hidden2label2(y)
        logit = y
        return logit
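

# --- Minimal usage sketch (not part of the original file) ---
# The attribute names below mirror what __init__ reads from args
# (lstm_hidden_dim, lstm_num_layers, embed_num, embed_dim, class_num,
# paddingId, word_Embedding, pretrained_weight, dropout); the concrete
# values are illustrative assumptions, not values from the repository.
if __name__ == "__main__":
    from argparse import Namespace

    demo_args = Namespace(
        lstm_hidden_dim=200,       # hidden_dim // 2 = 100 units per direction
        lstm_num_layers=1,
        embed_num=5000,            # hypothetical vocabulary size
        embed_dim=100,
        class_num=2,
        paddingId=0,
        word_Embedding=False,      # skip copying pretrained weights in this sketch
        pretrained_weight=None,
        dropout=0.0,
    )
    model = BiLSTM(demo_args)
    # fake batch: seq_len=15, batch_size=4, filled with random token ids
    tokens = torch.randint(0, demo_args.embed_num, (15, 4))
    logits = model(tokens)
    print(logits.size())           # expected: torch.Size([4, 2])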