Update tf.keras hvd.allreduce() API to match tensorflow allreduce() (#2423)

Signed-off-by: aaron276h <aaron@determined.ai>
aaron276h committed Nov 11, 2020
1 parent c7a48a0 commit 108d3a6
Showing 3 changed files with 29 additions and 6 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -14,6 +14,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

 - Added support for backward_passes_per_step > 1 for TF LegacyOptimizer in graph mode. ([#2401](https://github.com/horovod/horovod/pull/2401))
 
+- Add support for specifying `op` and `compression` in `horovod.tensorflow.keras.allreduce()`. ([#2423](https://github.com/horovod/horovod/pull/2423))
+
 ### Changed
 
 ### Deprecated
5 changes: 3 additions & 2 deletions horovod/_keras/__init__.py
@@ -145,10 +145,11 @@ def broadcast_global_variables(backend, root_rank):
     return _eval(backend, hvd.broadcast_global_variables(root_rank))
 
 
-def allreduce(backend, value, name, average, prescale_factor, postscale_factor):
+def allreduce(backend, value, name, average, prescale_factor, postscale_factor, op, compression):
     return _eval(backend, hvd.allreduce(tf.constant(value, name=name), average=average,
                                         prescale_factor=prescale_factor,
-                                        postscale_factor=postscale_factor))
+                                        postscale_factor=postscale_factor,
+                                        op=op, compression=compression))
 
 
 def allgather(backend, value, name):
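For context: the `compression` argument forwarded here expects one of Horovod's compressor classes. Below is a minimal sketch of that interface using the stock `Compression.fp16` compressor from `horovod.tensorflow` — the import path and the `compress`/`decompress` signatures are assumptions based on the Horovod 0.20-era API, not part of this diff:

```python
import tensorflow as tf
from horovod.tensorflow import Compression  # assumed import path (Horovod ~0.20)

t = tf.constant([1.0, 2.0, 3.0])

# A compressor exposes compress()/decompress(): fp16 casts floating-point
# payloads down to float16 before transport and restores the original
# dtype afterwards. `ctx` carries the dtype needed for decompression.
compressed, ctx = Compression.fp16.compress(t)
restored = Compression.fp16.decompress(compressed, ctx)
```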
28 changes: 24 additions & 4 deletions horovod/tensorflow/keras/__init__.py
@@ -119,20 +119,40 @@ def broadcast_global_variables(root_rank):
     return _impl.broadcast_global_variables(K, root_rank)
 
 
-def allreduce(value, name=None, average=True, prescale_factor=1.0, postscale_factor=1.0):
+def allreduce(value, name=None, average=True,
+              prescale_factor=1.0,
+              postscale_factor=1.0,
+              op=None,
+              compression=Compression.none):
     """
     Perform an allreduce on a tensor-compatible value.
     Arguments:
         value: A tensor-compatible value to reduce.
                The shape of the input must be identical across all ranks.
         name: Optional name for the constants created by this operation.
-        average: If True, computes the average over all ranks.
-                 Otherwise, computes the sum over all ranks.
+        average:
+            .. warning:: .. deprecated:: 0.19.0
+                Use `op` instead. Will be removed in v0.21.0.
+        prescale_factor: Multiplicative factor to scale tensor before allreduce.
+        postscale_factor: Multiplicative factor to scale tensor after allreduce.
+        op: The reduction operation to combine tensors across different ranks.
+            Defaults to Average if None is given.
+        compression: Compression algorithm used to reduce the amount of data
+                     sent and received by each worker node. Defaults to not
+                     using compression.
     """
-    return _impl.allreduce(K, value, name, average, prescale_factor, postscale_factor)
+    return _impl.allreduce(
+        backend=K,
+        value=value,
+        name=name,
+        average=average,
+        prescale_factor=prescale_factor,
+        postscale_factor=postscale_factor,
+        op=op,
+        compression=compression)
 
 
 def allgather(value, name=None):
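A minimal usage sketch of the updated public API (not part of this commit; it assumes a Horovod build with TensorFlow support, and that `Sum` and `Compression` are importable from `horovod.tensorflow` as in the 0.20-era API):

```python
import horovod.tensorflow.keras as hvd
from horovod.tensorflow import Sum, Compression  # assumed import locations

hvd.init()

# The two parameters this commit adds: an explicit reduction op and fp16
# wire compression. `op` supersedes the deprecated `average` flag, so
# average is set to None when an op is supplied.
total = hvd.allreduce([1.0, 2.0, 3.0], average=None, op=Sum,
                      compression=Compression.fp16)

# Launch across workers with e.g.: horovodrun -np 2 python demo.py
print(total)
```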
