Make non-Keras activation functions available
JarnoRFB committed Jul 6, 2018
1 parent c324c89 commit 47afb05
Showing 1 changed file with 16 additions and 1 deletion.
iclr_wrap_up/models/feedforward.py (17 changes: 16 additions & 1 deletion)
@@ -7,12 +7,27 @@
     'adam': tf.train.AdamOptimizer,
 }
 
+activation_fn_map = {
+    'tanh': tf.nn.tanh,
+    'relu': tf.nn.relu,
+    'sigmoid': tf.nn.sigmoid,
+    'softsign': tf.nn.softsign,
+    'softplus': tf.nn.softplus,
+    'leaky_relu': tf.nn.leaky_relu,
+    'hard_sigmoid': 'hard_sigmoid',  # no tf.nn equivalent; left as a string for Keras to resolve
+    'selu': tf.nn.selu,
+    'relu6': tf.nn.relu6,
+    'elu': tf.nn.elu,
+    'linear': 'linear'  # identity activation; left as a string for Keras to resolve
+}
+
+
 def load(architecture, activation_fn, optimizer, learning_rate, input_size, output_size):
     input_layer = keras.layers.Input((input_size,))
     clayer = input_layer
     for n in architecture:
         clayer = keras.layers.Dense(n,
-                                    activation=activation_fn,
+                                    activation=activation_fn_map[activation_fn],
                                     kernel_initializer=keras.initializers.TruncatedNormal(mean=0.0,
                                                                                           stddev=1 / np.sqrt(float(n)),
                                                                                           seed=None),
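For context, a minimal usage sketch of the new lookup. The module path and the load signature are taken from the diff above; the layer widths, hyperparameters, and the assumption that load returns the assembled model are illustrative, not part of the commit.

# Hypothetical call site: the caller passes a plain string such as 'relu6',
# which load() now resolves via activation_fn_map, so activations that Keras
# does not accept as strings (e.g. relu6, leaky_relu) become usable too.
from iclr_wrap_up.models import feedforward

model = feedforward.load(architecture=[10, 7, 5],  # hidden layer widths (assumed)
                         activation_fn='relu6',    # key into activation_fn_map
                         optimizer='adam',         # key into the optimizer map at the top of the module
                         learning_rate=1e-3,       # illustrative value
                         input_size=12,
                         output_size=2)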
