Fix Conditional RNNs input specs
lvapeab committed Jan 2, 2018
1 parent 52f32cf · commit 3b50f10
Showing 1 changed file with 11 additions and 1 deletion.
keras/layers/recurrent.py: 12 changes (11 additions & 1 deletion)
@@ -1667,6 +1667,7 @@ def __init__(self, units,
                  recurrent_dropout=0.,
                  conditional_dropout=0.,
                  num_inputs=4,
+                 static_ctx=False,
                  **kwargs):
 
         super(GRUCond, self).__init__(**kwargs)
@@ -1705,6 +1706,10 @@ def __init__(self, units,
         self.conditional_dropout = min(1., max(0., conditional_dropout)) if conditional_dropout is not None else 0.
         self.num_inputs = num_inputs
-        self.input_spec = [InputSpec(ndim=3), InputSpec(ndim=3)]
+        if static_ctx:
+            self.input_spec = [InputSpec(ndim=3), InputSpec(ndim=2)]
+        else:
+            self.input_spec = [InputSpec(ndim=3), InputSpec(ndim=3)]
         for _ in range(len(self.input_spec), self.num_inputs):
             self.input_spec.append(InputSpec(ndim=2))
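
Restated outside diff form, the constructor now resolves the specs like this (a minimal sketch; the unit count is illustrative, and with the default num_inputs=4 the loop above pads the list with two further InputSpec(ndim=2) entries):

    layer = GRUCond(128)                   # static_ctx=False (default)
    # layer.input_spec[:2] -> [InputSpec(ndim=3), InputSpec(ndim=3)]
    # i.e. the context is a sequence: (batch, timesteps, ctx_dim)

    layer = GRUCond(128, static_ctx=True)
    # layer.input_spec[:2] -> [InputSpec(ndim=3), InputSpec(ndim=2)]
    # i.e. the context is one vector per sample: (batch, ctx_dim)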

@@ -4226,6 +4231,7 @@ def __init__(self, units,
                  recurrent_dropout=0.,
                  conditional_dropout=0.,
                  num_inputs=4,
+                 static_ctx=False,
                  **kwargs):
 
         super(LSTMCond, self).__init__(**kwargs)
@@ -4271,7 +4277,11 @@ def __init__(self, units,
         self.recurrent_dropout = min(1., max(0., recurrent_dropout)) if recurrent_dropout is not None else 0.
         self.conditional_dropout = min(1., max(0., conditional_dropout)) if conditional_dropout is not None else 0.
         self.num_inputs = num_inputs
-        self.input_spec = [InputSpec(ndim=3), InputSpec(ndim=2)]
+        if static_ctx:
+            self.input_spec = [InputSpec(ndim=3), InputSpec(ndim=2)]
+        else:
+            self.input_spec = [InputSpec(ndim=3), InputSpec(ndim=3)]
+
         for _ in range(len(self.input_spec), self.num_inputs):
             self.input_spec.append(InputSpec(ndim=2))
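
The LSTMCond change is the substance of the fix: before this commit the context spec was hard-coded, to ndim=2 here and to ndim=3 in GRUCond, so each layer accepted only one kind of context. A hedged usage sketch under the new flag (the [sequence, context] call convention, the import path, and all shapes are assumptions drawn from the specs above, not part of the commit):

    from keras.layers import Input
    from keras.layers.recurrent import LSTMCond  # this fork's conditional LSTM

    word_seq = Input(shape=(None, 300))   # first input, ndim=3: (batch, timesteps, dim)

    # Sequential context (static_ctx=False): the second input must also be ndim=3.
    ctx_seq = Input(shape=(None, 500))
    out_seq = LSTMCond(128, num_inputs=2)([word_seq, ctx_seq])

    # Static context (static_ctx=True): the second input is ndim=2, one vector per sample.
    ctx_vec = Input(shape=(500,))
    out_vec = LSTMCond(128, static_ctx=True, num_inputs=2)([word_seq, ctx_vec])

Passing num_inputs=2 keeps the spec list at exactly the two tensors supplied; with the default of 4, the extra ndim=2 specs presumably cover optional 2D inputs such as initial states.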
