Keras 1 backwards compatibility (dropouts in recurrent.py)
lvapeab committed Dec 27, 2017
1 parent 8a4b9c6 commit 52f32cf
Showing 1 changed file with 40 additions and 34 deletions.
keras/layers/recurrent.py (40 additions, 34 deletions)
@@ -833,8 +833,8 @@ def __init__(self, units,
         self.recurrent_constraint = constraints.get(recurrent_constraint)
         self.bias_constraint = constraints.get(bias_constraint)
 
-        self.dropout = min(1., max(0., dropout))
-        self.recurrent_dropout = min(1., max(0., recurrent_dropout))
+        self.dropout = min(1., max(0., dropout)) if dropout is not None else 0.
+        self.recurrent_dropout = min(1., max(0., recurrent_dropout)) if recurrent_dropout is not None else 0.
         self.state_size = self.units
         self._dropout_mask = None
         self._recurrent_dropout_mask = None
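The normalization shown above is repeated for every dropout-style constructor argument in the file (`dropout`, `recurrent_dropout`, `conditional_dropout`, `attention_dropout`): treat `None` as "dropout disabled", as Keras-1-era code and configs may pass it, and clamp everything else into [0, 1]. A minimal sketch of the pattern; the helper name `_normalize_dropout` is hypothetical, introduced here only for illustration:

    def _normalize_dropout(rate):
        # Hypothetical helper illustrating the pattern applied throughout
        # this diff: treat None as "no dropout", then clamp into [0, 1].
        return min(1., max(0., rate)) if rate is not None else 0.

    assert _normalize_dropout(None) == 0.   # Keras-1-style "disabled" value
    assert _normalize_dropout(0.3) == 0.3   # ordinary rate passes through
    assert _normalize_dropout(-0.5) == 0.   # clamped from below
    assert _normalize_dropout(1.7) == 1.    # clamped from above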
@@ -987,6 +987,8 @@ def __init__(self, units,
             warnings.warn('The `implementation` argument '
                           'in `SimpleRNN` has been deprecated. '
                           'Please remove it from your layer call.')
+        dropout = 0. if dropout is None else dropout
+        recurrent_dropout = 0. if recurrent_dropout is None else recurrent_dropout
         if K.backend() == 'theano' and dropout + recurrent_dropout > 0.:
             warnings.warn(
                 'RNN dropout is no longer supported with the Theano backend '
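The two added lines serve a second purpose besides feeding the cell: the Theano check on the following line does arithmetic on the raw arguments, and adding `None` to a float raises a `TypeError`. A small sketch of the failure mode being fixed, under the assumption that `dropout` arrives as `None` from Keras-1-era code:

    dropout, recurrent_dropout = None, 0.2

    try:
        dropout + recurrent_dropout > 0.  # what the check did before this commit
    except TypeError as err:
        print(err)  # unsupported operand type(s) for +: 'NoneType' and 'float'

    # The fix coalesces None to 0. before any arithmetic on the rates:
    dropout = 0. if dropout is None else dropout
    recurrent_dropout = 0. if recurrent_dropout is None else recurrent_dropout
    assert dropout + recurrent_dropout > 0.  # safe now: 0. + 0.2 > 0.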
@@ -1195,8 +1197,8 @@ def __init__(self, units,
         self.recurrent_constraint = constraints.get(recurrent_constraint)
         self.bias_constraint = constraints.get(bias_constraint)
 
-        self.dropout = min(1., max(0., dropout))
-        self.recurrent_dropout = min(1., max(0., recurrent_dropout))
+        self.dropout = min(1., max(0., dropout)) if dropout is not None else 0.
+        self.recurrent_dropout = min(1., max(0., recurrent_dropout)) if recurrent_dropout is not None else 0.
         self.implementation = implementation
         self.state_size = self.units
         self._dropout_mask = None
@@ -1432,6 +1434,8 @@ def __init__(self, units,
             warnings.warn('`implementation=0` has been deprecated, '
                           'and now defaults to `implementation=2`. '
                           'Please update your layer call.')
+        dropout = 0. if dropout is None else dropout
+        recurrent_dropout = 0. if recurrent_dropout is None else recurrent_dropout
         if K.backend() == 'theano' and dropout + recurrent_dropout > 0.:
             warnings.warn(
                 'RNN dropout is no longer supported with the Theano backend '
@@ -1696,9 +1700,9 @@ def __init__(self, units,
         self.bias_constraint = constraints.get(bias_constraint)
 
         # Dropouts
-        self.dropout = min(1., max(0., dropout))
-        self.recurrent_dropout = min(1., max(0., recurrent_dropout))
-        self.conditional_dropout = min(1., max(0., conditional_dropout))
+        self.dropout = min(1., max(0., dropout)) if dropout is not None else 0.
+        self.recurrent_dropout = min(1., max(0., recurrent_dropout)) if recurrent_dropout is not None else 0.
+        self.conditional_dropout = min(1., max(0., conditional_dropout)) if conditional_dropout is not None else 0.
         self.num_inputs = num_inputs
         self.input_spec = [InputSpec(ndim=3), InputSpec(ndim=3)]
         for _ in range(len(self.input_spec), self.num_inputs):
@@ -2153,9 +2157,9 @@ def __init__(self, units,
         self.bias_ca_constraint = constraints.get(bias_ca_constraint)
 
         # Dropouts
-        self.dropout = min(1., max(0., dropout))
-        self.recurrent_dropout = min(1., max(0., recurrent_dropout))
-        self.attention_dropout = min(1., max(0., attention_dropout))
+        self.dropout = min(1., max(0., dropout)) if dropout is not None else 0.
+        self.recurrent_dropout = min(1., max(0., recurrent_dropout)) if recurrent_dropout is not None else 0.
+        self.attention_dropout = min(1., max(0., attention_dropout)) if attention_dropout is not None else 0.
         self.num_inputs = num_inputs
         self.input_spec = [InputSpec(ndim=3)]
         for _ in range(len(self.input_spec), self.num_inputs):
@@ -2662,10 +2666,10 @@ def __init__(self, units,
         self.bias_ca_constraint = constraints.get(bias_ca_constraint)
 
         # Dropouts
-        self.dropout = min(1., max(0., dropout))
-        self.recurrent_dropout = min(1., max(0., recurrent_dropout))
-        self.conditional_dropout = min(1., max(0., conditional_dropout))
-        self.attention_dropout = min(1., max(0., attention_dropout))
+        self.dropout = min(1., max(0., dropout)) if dropout is not None else 0.
+        self.recurrent_dropout = min(1., max(0., recurrent_dropout)) if recurrent_dropout is not None else 0.
+        self.conditional_dropout = min(1., max(0., conditional_dropout)) if conditional_dropout is not None else 0.
+        self.attention_dropout = min(1., max(0., attention_dropout)) if attention_dropout is not None else 0.
         self.num_inputs = num_inputs
         self.input_spec = [InputSpec(ndim=3), InputSpec(ndim=3)]
         for _ in range(len(self.input_spec), self.num_inputs):
@@ -3221,10 +3225,10 @@ def __init__(self, units,
         self.bias_ca_constraint = constraints.get(bias_ca_constraint)
 
         # Dropouts
-        self.dropout = min(1., max(0., dropout))
-        self.recurrent_dropout = min(1., max(0., recurrent_dropout))
-        self.conditional_dropout = min(1., max(0., conditional_dropout))
-        self.attention_dropout = min(1., max(0., attention_dropout))
+        self.dropout = min(1., max(0., dropout)) if dropout is not None else 0.
+        self.recurrent_dropout = min(1., max(0., recurrent_dropout)) if recurrent_dropout is not None else 0.
+        self.conditional_dropout = min(1., max(0., conditional_dropout)) if conditional_dropout is not None else 0.
+        self.attention_dropout = min(1., max(0., attention_dropout)) if attention_dropout is not None else 0.
         self.num_inputs = num_inputs
         self.input_spec = [InputSpec(ndim=3), InputSpec(ndim=3)]
         for _ in range(len(self.input_spec), self.num_inputs):
@@ -3731,8 +3735,8 @@ def __init__(self, units,
         self.recurrent_constraint = constraints.get(recurrent_constraint)
         self.bias_constraint = constraints.get(bias_constraint)
 
-        self.dropout = min(1., max(0., dropout))
-        self.recurrent_dropout = min(1., max(0., recurrent_dropout))
+        self.dropout = min(1., max(0., dropout)) if dropout is not None else 0.
+        self.recurrent_dropout = min(1., max(0., recurrent_dropout)) if recurrent_dropout is not None else 0.
         self.implementation = implementation
         self.state_size = (self.units, self.units)
         self._dropout_mask = None
@@ -3991,6 +3995,8 @@ def __init__(self, units,
             warnings.warn('`implementation=0` has been deprecated, '
                           'and now defaults to `implementation=2`. '
                           'Please update your layer call.')
+        dropout = 0. if dropout is None else dropout
+        recurrent_dropout = 0. if recurrent_dropout is None else recurrent_dropout
         if K.backend() == 'theano' and dropout + recurrent_dropout > 0.:
             warnings.warn(
                 'RNN dropout is no longer supported with the Theano backend '
@@ -4261,9 +4267,9 @@ def __init__(self, units,
         self.bias_ca_constraint = constraints.get(bias_ca_constraint)
 
         # Dropouts
-        self.dropout = min(1., max(0., dropout))
-        self.recurrent_dropout = min(1., max(0., recurrent_dropout))
-        self.conditional_dropout = min(1., max(0., conditional_dropout))
+        self.dropout = min(1., max(0., dropout)) if dropout is not None else 0.
+        self.recurrent_dropout = min(1., max(0., recurrent_dropout)) if recurrent_dropout is not None else 0.
+        self.conditional_dropout = min(1., max(0., conditional_dropout)) if conditional_dropout is not None else 0.
         self.num_inputs = num_inputs
         self.input_spec = [InputSpec(ndim=3), InputSpec(ndim=2)]
         for _ in range(len(self.input_spec), self.num_inputs):
@@ -4741,9 +4747,9 @@ def __init__(self, units,
         self.bias_ca_constraint = constraints.get(bias_ca_constraint)
 
         # Dropouts
-        self.dropout = min(1., max(0., dropout))
-        self.recurrent_dropout = min(1., max(0., recurrent_dropout))
-        self.attention_dropout = min(1., max(0., attention_dropout))
+        self.dropout = min(1., max(0., dropout)) if dropout is not None else 0.
+        self.recurrent_dropout = min(1., max(0., recurrent_dropout)) if recurrent_dropout is not None else 0.
+        self.attention_dropout = min(1., max(0., attention_dropout)) if attention_dropout is not None else 0.
         self.num_inputs = num_inputs
         self.input_spec = [InputSpec(ndim=3)]
         for _ in range(len(self.input_spec), self.num_inputs):
@@ -5268,10 +5274,10 @@ def __init__(self, units,
         self.bias_ca_constraint = constraints.get(bias_ca_constraint)
 
         # Dropouts
-        self.dropout = min(1., max(0., dropout))
-        self.recurrent_dropout = min(1., max(0., recurrent_dropout))
-        self.conditional_dropout = min(1., max(0., conditional_dropout))
-        self.attention_dropout = min(1., max(0., attention_dropout))
+        self.dropout = min(1., max(0., dropout)) if dropout is not None else 0.
+        self.recurrent_dropout = min(1., max(0., recurrent_dropout)) if recurrent_dropout is not None else 0.
+        self.conditional_dropout = min(1., max(0., conditional_dropout)) if conditional_dropout is not None else 0.
+        self.attention_dropout = min(1., max(0., attention_dropout)) if attention_dropout is not None else 0.
         self.num_inputs = num_inputs
         self.input_spec = [InputSpec(ndim=3), InputSpec(ndim=3)]
         for _ in range(len(self.input_spec), self.num_inputs):
@@ -5849,10 +5855,10 @@ def __init__(self, units,
         self.bias_ca_constraint = constraints.get(bias_ca_constraint)
 
         # Dropouts
-        self.dropout = min(1., max(0., dropout))
-        self.recurrent_dropout = min(1., max(0., recurrent_dropout))
-        self.conditional_dropout = min(1., max(0., conditional_dropout))
-        self.attention_dropout = min(1., max(0., attention_dropout))
+        self.dropout = min(1., max(0., dropout)) if dropout is not None else 0.
+        self.recurrent_dropout = min(1., max(0., recurrent_dropout)) if recurrent_dropout is not None else 0.
+        self.conditional_dropout = min(1., max(0., conditional_dropout)) if conditional_dropout is not None else 0.
+        self.attention_dropout = min(1., max(0., attention_dropout)) if attention_dropout is not None else 0.
 
         # Inputs
         self.num_inputs = num_inputs
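Taken together, the guards let layers be constructed with Keras-1-style `None` rates without crashing in `__init__`. A hedged usage sketch, assuming this fork is installed as `keras` and that `SimpleRNN` exposes the normalized rates via `dropout` and `recurrent_dropout` properties as in upstream Keras 2:

    from keras.layers import SimpleRNN

    # Before this commit, dropout=None crashed inside __init__
    # (min/max and + reject NoneType); now it is treated as 0.
    layer = SimpleRNN(32, dropout=None, recurrent_dropout=None)
    assert layer.dropout == 0.
    assert layer.recurrent_dropout == 0.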