Commit 775a0f0
[test] disable a flaky test (#1020)
Co-authored-by: Min Xu <min.xu.public@gmail.com>
min-xu-ai and flying-x committed Jun 29, 2022
1 parent a5116ec commit 775a0f0
Showing 2 changed files with 6 additions and 1 deletion.
4 changes: 4 additions & 0 deletions fair_dev/testing/testing.py
@@ -81,6 +81,10 @@
     reason="Python3.9 without CUDA is skipped",
 )
 
+skip_due_to_flakyness = pytest.mark.skip(
+    reason="Flaky test to be fixed or removed",
+)
+
 available_devices = ["cpu"]
 if torch.cuda.is_available():
     available_devices.append("cuda")
3 changes: 2 additions & 1 deletion tests/experimental/nn/test_multiprocess_pipe.py
@@ -20,7 +20,7 @@
 import torch.multiprocessing as mp
 import torch.nn as nn
 
-from fair_dev.testing.testing import skip_if_single_gpu
+from fair_dev.testing.testing import skip_due_to_flakyness, skip_if_single_gpu
 from fairscale.experimental.nn.distributed_pipeline import DistributedLoss, DistributedPipeline, PipelineModulesGraph
 from fairscale.internal import torch_version
 
@@ -113,6 +113,7 @@ def create_multiple_layers():
 @rpc_test(world_size=2)
 @pytest.mark.parametrize("devices", DEVICES)
 @skip_if_single_gpu
+@skip_due_to_flakyness
 def create_multiple_workers(devices):
     model = [RemoteModuleParams(nn.Linear, (4, 4), {}), RemoteModuleParams(nn.ReLU, (), {})]
     pipe = create_sequence_pipeline(model, balance=[1, 1], chunks=1, devices=devices[:2])
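
For context, a minimal, self-contained sketch of how a module-level skip marker like the one added above behaves (the test function below is hypothetical, for illustration only, not part of this commit): pytest still collects a test decorated with pytest.mark.skip, but it reports the test as skipped with the stated reason instead of executing it.

import pytest

# Same pattern as the marker added in fair_dev/testing/testing.py above.
skip_due_to_flakyness = pytest.mark.skip(
    reason="Flaky test to be fixed or removed",
)

@skip_due_to_flakyness
def test_example():  # hypothetical test, used only to show the marker
    # Never runs; the assertion below cannot fail a test run.
    assert False

Running pytest with -rs prints each skip reason in the short test summary, which is how the disabled create_multiple_workers test will appear until it is fixed or removed.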
