Fixing #2063 - Merge summaries after optimizer adds loss and gradients summaries (#2090)
ilblackdragon authored and martinwicke committed Apr 25, 2016
1 parent 940f8b1 commit 1e53ba6
Showing 1 changed file with 3 additions and 3 deletions.
tensorflow/contrib/learn/python/learn/estimators/base.py (6 changes: 3 additions & 3 deletions)
@@ -147,9 +147,6 @@ def _setup_training(self):
       self._model_predictions, self._model_loss = self.model_fn(
           self._inp, self._out)
 
-      # Set up a single operator to merge all the summaries
-      self._summaries = logging_ops.merge_all_summaries()
-
       # Create trainer and augment graph with gradients and optimizer.
       # Additionally creates initialization ops.
       learning_rate = self.learning_rate
@@ -165,6 +162,9 @@ def _setup_training(self):
       # Update ops during training, e.g. batch_norm_ops
       self._train = control_flow_ops.group(self._train, *ops.get_collection('update_ops'))
 
+      # Merge all summaries into single tensor.
+      self._summaries = logging_ops.merge_all_summaries()
+
       # Get all initializers for all trainable variables.
       self._initializers = variables.initialize_all_variables()
 
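Why the move matters: merge_all_summaries() only merges the summary ops already registered in the graph's summary collection at the moment it is called, so calling it before the optimizer setup silently drops the loss and gradient summaries added later. Below is a minimal standalone sketch of that effect, written against the TF 0.x public summary API (tf.scalar_summary / tf.merge_all_summaries) rather than the internal logging_ops wrapper used in base.py; the variable names are illustrative only.

    import tensorflow as tf

    x = tf.placeholder(tf.float32, shape=[None], name='x')
    tf.scalar_summary('input_mean', tf.reduce_mean(x))

    # Merging here misses anything created below, e.g. the loss/gradient
    # summaries that the optimizer setup adds.
    early_merge = tf.merge_all_summaries()

    loss = tf.reduce_mean(tf.square(x))
    tf.scalar_summary('loss', loss)  # stands in for the later-added summaries

    # Merging after every summary is registered captures all of them,
    # which is what this commit achieves by moving the call.
    full_merge = tf.merge_all_summaries()

The same ordering consideration applies to the later tf.summary.merge_all() API in TF 1.x and beyond.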
