Commit 2f8bb4d

NN seems to work (technically, only)
tuetschek committed May 12, 2015
1 parent b748060 commit 2f8bb4d
Showing 2 changed files with 23 additions and 7 deletions.
19 changes: 17 additions & 2 deletions tgen/nn.py
@@ -178,11 +178,11 @@ def connect(self, inputs):
         return output


-class ConcatLayer(Layer):
+class Concat(Layer):

     def __init__(self, name):

-        super(ConcatLayer, self).__init__(name)
+        super(Concat, self).__init__(name)
         self.params = []

     def connect(self, inputs):
@@ -193,6 +193,21 @@ def connect(self, inputs):
         return output


+class Flatten(Layer):
+
+    def __init__(self, name):
+
+        super(Flatten, self).__init__(name)
+        self.params = []
+
+    def connect(self, inputs):
+
+        output = inputs.reshape((T.prod(inputs.shape),))
+        self.inputs.append(inputs)
+        self.outputs.append(output)
+        return output
+
+
 class NN(object):
     """A Theano neural network for ranking with perceptron cost function."""
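
Note (editor's sketch, not part of the commit): the new Flatten layer collapses whatever tensor it receives into a 1-D vector via Theano's symbolic reshape; T.prod(inputs.shape) is the total element count, computed symbolically, so the same layer works for any input shape. A minimal standalone illustration, assuming only Theano and NumPy:

import numpy as np
import theano
import theano.tensor as T

inputs = T.matrix('inputs')                        # e.g. shape (5, emb_size)
# total element count is a symbolic scalar, so the reshape fits any size
output = inputs.reshape((T.prod(inputs.shape),))

flatten = theano.function([inputs], output, allow_input_downcast=True)
print flatten(np.arange(10.0).reshape(2, 5))       # -> [0. 1. ... 9.], shape (10,)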
11 changes: 6 additions & 5 deletions tgen/rank_nn.py
@@ -11,7 +11,7 @@
 import theano.tensor as T
 import numpy as np

-from tgen.nn import FeedForwardLayer, ConcatLayer, MaxPool1DLayer, Embedding, NN
+from tgen.nn import FeedForwardLayer, Concat, Flatten, MaxPool1DLayer, Embedding, NN
 from tgen.rank import BasePerceptronRanker, FeaturesPerceptronRanker
 from tgen.logf import log_debug, log_info

@@ -179,7 +179,7 @@ def _score(self, cand_embs):

     def _extract_feats(self, tree, da):

-        # DA embeddings
+        # DA embeddings (slot - value; size == 2x self.max_da_len)
         da_emb_idxs = []
         for dai in da[:self.max_da_len]:
             da_emb_idxs.append(self.dict_slot.get(dai.name, self.UNK_SLOT))
@@ -189,7 +189,7 @@ def _extract_feats(self, tree, da):
         for _ in xrange(len(da_emb_idxs) / 2, self.max_da_len):
             da_emb_idxs.extend([self.UNK_SLOT, self.UNK_VALUE])

-        # tree embeddings
+        # tree embeddings (parent_lemma - formeme - lemma; size == 3x self.max_tree_len)
         tree_emb_idxs = []
         for parent_ord, (t_lemma, formeme) in zip(tree.parents[1:self.max_tree_len + 1],
                                                    tree.nodes[1:self.max_tree_len + 1]):
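
For illustration (editor's addition, not in the diff): the padding loop above fills the index list up to 2 * max_da_len, one (slot, value) pair per unused position, which is what the amended comment means by "size == 2x self.max_da_len". A toy run with made-up dictionaries (the real ones are built from training data, and DAIs expose .name/.value rather than being plain tuples):

UNK_SLOT, UNK_VALUE = 0, 1
dict_slot = {'food': 2, 'area': 3}               # hypothetical contents
dict_value = {'Chinese': 4, 'centre': 5}         # hypothetical contents
max_da_len = 4

da = [('food', 'Chinese'), ('area', 'centre')]   # two DAIs as (slot, value)
da_emb_idxs = []
for slot, value in da[:max_da_len]:
    da_emb_idxs.append(dict_slot.get(slot, UNK_SLOT))
    da_emb_idxs.append(dict_value.get(value, UNK_VALUE))
for _ in xrange(len(da_emb_idxs) / 2, max_da_len):   # Python 2 integer division
    da_emb_idxs.extend([UNK_SLOT, UNK_VALUE])

print da_emb_idxs   # [2, 4, 3, 5, 0, 1, 0, 1] -- length 2 * max_da_len == 8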
@@ -209,8 +209,9 @@ def _init_neural_network(self):
              Embedding('emb_trees', self.dict_size, self.emb_size, 'uniform_005')],
             [MaxPool1DLayer('mp_das', self.max_da_len),
              MaxPool1DLayer('mp_trees', self.max_tree_len)],
-            [ConcatLayer('concat')],
-            [FeedForwardLayer('ff1', self.emb_size * 2, self.num_hidden_units,
+            [Concat('concat')],
+            [Flatten('flatten')],
+            [FeedForwardLayer('ff1', self.emb_size * 5, self.num_hidden_units,
                               T.tanh, self.initialization)],
             [FeedForwardLayer('ff2', self.num_hidden_units, self.num_hidden_units,
                               T.tanh, self.initialization)],
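
A plausible reading of the ff1 input-size change (inferred from the layer stack, not stated in the commit): each DAI contributes 2 embeddings (slot, value) and each tree node 3 (parent lemma, formeme, lemma); max-pooling runs over the length dimension only, so the pooled outputs keep those 2 and 3 rows. Concat stacks them into 5 rows of emb_size, and the new Flatten turns that into the flat vector ff1 expects, hence emb_size * 5 replacing emb_size * 2. Assumed shape walkthrough:

# assumed shapes, inferred from the layer stack -- not shown in the commit
# emb_das:   (max_da_len, 2, emb_size)    # slot + value embedding per DAI
# emb_trees: (max_tree_len, 3, emb_size)  # parent_lemma + formeme + lemma per node
# mp_das:    (2, emb_size)                # max over the max_da_len positions
# mp_trees:  (3, emb_size)                # max over the max_tree_len positions
# concat:    (5, emb_size)
# flatten:   (5 * emb_size,)              # => FeedForwardLayer('ff1', emb_size * 5, ...)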
