Permalink
Browse files
Tweak some NNI params to perform better on the test
- Loading branch information...
Showing 1 changed file with 4 additions and 2 deletions (+4 −2).
mloop/nnlearner.py
|
|
@@ -71,7 +71,7 @@ def _create_neural_net(self): |
|
|
tf.reduce_mean(tf.reduce_sum(tf.square(self.output_var - self.output_placeholder),
|
|
|
reduction_indices=[1]))
|
|
|
+ self.regularisation_coefficient * sum([tf.nn.l2_loss(W) for W in self.weights]))
|
|
|
- self.train_step = tf.train.AdamOptimizer().minimize(loss_func)
|
|
|
+ self.train_step = tf.train.AdamOptimizer(1.0).minimize(loss_func)
|
|
|
|
|
|
self.tf_session.run(tf.initialize_all_variables())
|
|
|
|
|
|
@@ -104,7 +104,9 @@ def fit_neural_net(self, all_params, all_costs): |
|
|
batch_output = [[all_costs[index]] for index in batch_indices]
|
|
|
self.tf_session.run(self.train_step,
|
|
|
feed_dict={self.input_placeholder: batch_input,
|
|
|
- self.output_placeholder: batch_output})
|
|
|
+ self.output_placeholder: batch_output,
|
|
|
+ self.regularisation_coefficient: 0.01,
|
|
|
+ })
|
|
|
|
|
|
def predict_cost(self,params):
|
|
|
'''
|
|
|
|
0 comments on commit
e30906a