
Commit 6cd076a
Standardize **kwargs order in recurrent layers.
fchollet committed Apr 10, 2015
1 parent f953508 commit 6cd076a
Showing 1 changed file with 3 additions and 3 deletions.
keras/layers/recurrent.py: 3 additions & 3 deletions

@@ -103,7 +103,7 @@ def _step(self, *args):
         o = args[0]
         for i in range(1, self.depth+1):
             o += self.inner_activation(T.dot(args[i], args[i+self.depth]))
-        return o
+        return self.activation(o)
 
     def output(self, train):
         X = self.get_input(train)
@@ -152,7 +152,7 @@ class GRU(Layer):
     def __init__(self, input_dim, output_dim=128,
                  init='uniform', inner_init='orthogonal',
                  activation='sigmoid', inner_activation='hard_sigmoid',
-                 truncate_gradient=-1, weights=None, return_sequences=False):
+                 weights=None, truncate_gradient=-1, return_sequences=False):
 
         self.input_dim = input_dim
         self.output_dim = output_dim
@@ -244,7 +244,7 @@ class LSTM(Layer):
     def __init__(self, input_dim, output_dim=128,
                  init='uniform', inner_init='orthogonal',
                  activation='tanh', inner_activation='hard_sigmoid',
-                 truncate_gradient=-1, weights=None, return_sequences=False):
+                 weights=None, truncate_gradient=-1, return_sequences=False):
 
         self.input_dim = input_dim
         self.output_dim = output_dim
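Note that the first hunk changes behavior, not just argument order: the accumulated step output is now passed through the layer's output activation before being returned. A rough NumPy sketch of the resulting step logic (hypothetical names, shapes, and activations; not the library's actual Theano code):

import numpy as np

def step(args, depth, activation=np.tanh, inner_activation=np.abs):
    # args[0]: pre-activated input projection for this timestep;
    # args[1..depth]: previous hidden states;
    # args[depth+1..2*depth]: the corresponding recurrent weight matrices.
    o = args[0]
    for i in range(1, depth + 1):
        o += inner_activation(np.dot(args[i], args[i + depth]))
    # The changed line: apply the output activation to the summed
    # pre-activation instead of returning it raw.
    return activation(o)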
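Because `weights` and `truncate_gradient` swap positions in the GRU and LSTM signatures, any caller passing those two arguments positionally would silently misbind them after this commit; keyword arguments are order-proof. A minimal usage sketch, assuming the Keras API at this commit (the dimension values are hypothetical):

from keras.layers.recurrent import GRU, LSTM

# Keywords track the standardized signature regardless of position:
gru = GRU(input_dim=64, output_dim=128,
          weights=None, truncate_gradient=-1, return_sequences=False)
lstm = LSTM(input_dim=64, output_dim=128,
            weights=None, truncate_gradient=-1, return_sequences=True)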
