Skip to content

Commit

Permalink
Try to fix unit test failures for torch head (#3280)
Browse files Browse the repository at this point in the history
* rebased on master
Signed-off-by: TJ <tix@uber.com>

* rebased on master

Signed-off-by: TJ <tix@uber.com>

* revert changes in mxnet

Signed-off-by: TJ <tix@uber.com>
  • Loading branch information
TJ Xu committed Nov 19, 2021
1 parent 90b397e commit d395a88
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 5 deletions.
1 change: 1 addition & 0 deletions horovod/common/basics.py
Expand Up @@ -486,3 +486,4 @@ def _comm_process_set_id(self, comm: MPI.Comm) -> int:
elif result == self.HOROVOD_PROCESS_SET_ERROR_UNKNOWN_SET:
raise ValueError('MPI communicator does not correspond to any registered process set.')
return result

10 changes: 5 additions & 5 deletions test/parallel/test_torch.py
Expand Up @@ -2068,7 +2068,7 @@ def test_broadcast_state(self):
def new_optimizer(cls, opt_params, model):
p = {
k: v for k, v in opt_params.items()
if k in inspect.getargspec(cls.__init__).args
if k in inspect.signature(cls.__init__).parameters
}
return cls(model.parameters(), **p)

Expand Down Expand Up @@ -2220,7 +2220,7 @@ def create_model(opt_class):
params = params_0 if hvd.rank() == 0 else params_1
p = {
k: v for k, v in params.items()
if k in inspect.getargspec(opt_class.__init__).args
if k in inspect.signature(opt_class.__init__).parameters
}
opt = opt_class(model.parameters(), **p)
opt = hvd.DistributedOptimizer(opt, named_parameters=model.named_parameters())
Expand Down Expand Up @@ -2249,7 +2249,7 @@ def create_model(opt_class):
hvd.broadcast_optimizer_state(optimizer, root_rank=0)
p0 = {
k: v for k, v in params_0.items()
if k in inspect.getargspec(opt_class.__init__).args
if k in inspect.signature(opt_class.__init__).parameters
}
for k, p in p0.items():
p_actual = optimizer.param_groups[0][k]
Expand Down Expand Up @@ -2409,7 +2409,7 @@ def test_force_allreduce(self):
def new_optimizer(cls, opt_params, model):
p = {
k: v for k, v in opt_params.items()
if k in inspect.getargspec(cls.__init__).args
if k in inspect.signature(cls.__init__).parameters
}
return cls(model.parameters(), **p)

Expand Down Expand Up @@ -3136,7 +3136,7 @@ def test_optimizer_process_sets(self):
def new_optimizer(cls, opt_params, model):
p = {
k: v for k, v in opt_params.items()
if k in inspect.getargspec(cls.__init__).args
if k in inspect.signature(cls.__init__).parameters
}
return cls(model.parameters(), **p)

Expand Down

0 comments on commit d395a88

Please sign in to comment.