Fix race condition in Fabric test (#17002)
awaelchli committed Mar 8, 2023
1 parent d7e2ab9 commit aa7f252
Showing 1 changed file with 3 additions and 1 deletion.
4 changes: 3 additions & 1 deletion tests/tests_fabric/parity/test_parity_ddp.py
@@ -125,7 +125,6 @@ def train_fabric_ddp(fabric):
    return model.state_dict(), torch.tensor(iteration_timings), memory_stats


-@pytest.mark.flaky(reruns=3)
@RunIf(standalone=True)
@pytest.mark.usefixtures("reset_deterministic_algorithm", "reset_cudnn_benchmark")
@pytest.mark.parametrize(
@@ -148,6 +147,9 @@ def test_parity_ddp(accelerator, devices, tolerance):
    fabric.barrier()
    cuda_reset()
    torch.distributed.destroy_process_group()
+    # sleep for a bit to avoid race conditions, since the very first call in `train_torch_ddp`
+    # is initializing a new process group
+    time.sleep(3)

    # Train with raw PyTorch
    state_dict_torch, timings_torch, memory_torch = train_torch_ddp(
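For context, the sketch below isolates the teardown sequence that the added lines produce between the Fabric run and the raw PyTorch run. It is a minimal illustration, not the actual test file: the function name `teardown_between_runs` is made up, and the surrounding test details are assumed; only the barrier / destroy / sleep ordering mirrors the diff above.

# Minimal sketch (hypothetical helper, not part of the commit) of the teardown
# sequence inserted between the two training runs in test_parity_ddp.
import time

import torch.distributed as dist


def teardown_between_runs(fabric):
    # Wait until every rank has finished the Fabric training run.
    fabric.barrier()
    # Tear down the process group that the Fabric run initialized so the raw
    # PyTorch run can create a fresh one.
    dist.destroy_process_group()
    # Pause briefly so no rank re-initializes a new process group (the very
    # first thing `train_torch_ddp` does) while another rank is still tearing
    # the old one down; that overlap is the race condition this commit fixes.
    time.sleep(3)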
