Commit e2c84ea

Reordered callbacks
Signed-off-by: Travis Addair <taddair@uber.com>
tgaddair committed Sep 17, 2020
1 parent 37ff83a commit e2c84ea
Showing 2 changed files with 4 additions and 4 deletions.
examples/elastic/tensorflow_keras_mnist_elastic.py (4 changes: 2 additions & 2 deletions)
@@ -63,9 +63,9 @@ def on_state_reset():
 
 callbacks = [
     # Horovod: elastic training callbacks to update and commit state.
-    hvd.elastic.CommitStateCallback(state),
-    hvd.elastic.UpdateBatchStateCallback(state),
     hvd.elastic.UpdateEpochStateCallback(state),
+    hvd.elastic.UpdateBatchStateCallback(state),
+    hvd.elastic.CommitStateCallback(state),
 ]
 
 # Horovod: save checkpoints only on worker 0 to prevent other workers from corrupting them.
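
Keras invokes callbacks in list order, so the reordering above runs the epoch- and batch-counter updates before CommitStateCallback commits the state; with the old order, a commit fired from the same event could presumably capture stale counters. A minimal sketch of the corrected usage, assuming the surrounding example's compiled model and tf.data pipeline (the `model`, `dataset`, and KerasState setup are illustrative assumptions, not part of this diff):

import horovod.tensorflow.keras as hvd

# Assumption: `model` is a compiled tf.keras model and `dataset` is a
# finite tf.data pipeline, as in the surrounding example script.
state = hvd.elastic.KerasState(model, batch=0, epoch=0)

callbacks = [
    # Update the tracked epoch and batch counters first...
    hvd.elastic.UpdateEpochStateCallback(state),
    hvd.elastic.UpdateBatchStateCallback(state),
    # ...then commit, so the committed state reflects the updated counters.
    hvd.elastic.CommitStateCallback(state),
]

@hvd.elastic.run
def train(state):
    # Resume from the last committed epoch after an elastic restart.
    model.fit(dataset, callbacks=callbacks, epochs=24 - state.epoch)

train(state)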
horovod/tensorflow/__init__.py (4 changes: 2 additions & 2 deletions)
@@ -299,7 +299,7 @@ def __init__(self, optimizer, name=None, use_locking=False, device_dense='',
         super(_DistributedOptimizer, self).__init__(name=name, use_locking=use_locking)
 
         self._optimizer = optimizer
-        self._allreduce_grads = make_allreduce_grads_fn(
+        self._allreduce_grads = _make_allreduce_grads_fn(
             name, device_dense, device_sparse, compression, sparse_as_dense, op,
             gradient_predivide_factor)
 
@@ -515,7 +515,7 @@ def __init__(self, tape, device_dense, device_sparse, compression, sparse_as_den
         super(self.__class__, self).__init__(persistent)
 
         self._tape = tape
-        self._allreduce_grads = make_allreduce_grads_fn(
+        self._allreduce_grads = _make_allreduce_grads_fn(
             'DistributedGradientTape', device_dense, device_sparse, compression,
             sparse_as_dense, op, gradient_predivide_factor)
 
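
Both hunks are the same rename fix: the module-level helper that builds the allreduce closure is the private _make_allreduce_grads_fn, and these two constructors still referenced the old unprefixed name, which would presumably raise NameError as soon as either wrapper was built. A minimal sketch of the code path that exercises the corrected call, using Horovod's public TensorFlow eager API (variable names are illustrative):

import tensorflow as tf
import horovod.tensorflow as hvd

hvd.init()

w = tf.Variable(2.0)
with tf.GradientTape() as tape:
    loss = w * w

# Wrapping the tape runs the fixed constructor, which now resolves
# _make_allreduce_grads_fn; before this commit the old name was undefined.
tape = hvd.DistributedGradientTape(tape)
grads = tape.gradient(loss, [w])  # gradients are averaged across workers

hvd.DistributedOptimizer takes the same path through the first hunk when it wraps a tf.train optimizer.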
