Use process_set size instead of world size in MXNet DistributedOptimizer/Trainer in quick return conditions. (#3090)

Signed-off-by: Josh Romero <joshr@nvidia.com>
romerojosh committed Aug 7, 2021
1 parent cf9f5dd commit 3892d8a
Showing 1 changed file with 2 additions and 2 deletions.
horovod/mxnet/__init__.py (2 additions, 2 deletions)
@@ -62,7 +62,7 @@ def create_state_multi_precision(self, index, weight):
         return self._optimizer.create_state_multi_precision(index, weight)

     def _do_allreduce(self, index, grad):
-        if size() == 1: return
+        if self._process_set.size() == 1: return

         if isinstance(index, (tuple, list)):
             if (self._num_groups > 0):
@@ -163,7 +163,7 @@ def __init__(self, params, optimizer, optimizer_params=None,
         self._num_groups = num_groups

     def _allreduce_grads(self):
-        if size() == 1: return
+        if self._process_set.size() == 1: return
         if not self._process_set.included(): return

         if (self._num_groups > 0):
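For context, both changed lines are the early-exit checks that skip the allreduce when only one worker would participate. Once an optimizer or trainer is bound to a process set, the relevant count is that process set's size, not the global world size: in a multi-rank job, a single-rank process set should still take the quick return. The following is a minimal usage sketch, assuming Horovod's process-set API (hvd.ProcessSet, the process_sets argument to hvd.init, and a process_set keyword on DistributedOptimizer); exact names may vary between Horovod versions.

```python
import mxnet as mx
import horovod.mxnet as hvd

# Suppose a 4-rank job in which only ranks 0 and 1 share one model replica.
subset = hvd.ProcessSet([0, 1])          # assumed process-set constructor
hvd.init(process_sets=[subset])          # register the subset at init time

opt = mx.optimizer.SGD(learning_rate=0.01)

# This optimizer only allreduces within `subset`, so its quick-return test
# must compare against subset's size (2 here), not the world size (4).
# With a single-rank process set on a multi-rank job, the old `size() == 1`
# check would be False and an unnecessary allreduce would be attempted.
dist_opt = hvd.DistributedOptimizer(opt, process_set=subset)
```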
