From 027268542e8fc7436ce3d85353cd34c7f0b2ba08 Mon Sep 17 00:00:00 2001
From: awaelchli
Date: Wed, 20 Jul 2022 03:06:53 +0200
Subject: [PATCH 1/3] Set default strategy to ddp_fork in interactive environments

---
 .../trainer/connectors/accelerator_connector.py |  6 ++++--
 .../accelerators/test_accelerator_connector.py  | 10 ++++++++++
 2 files changed, 14 insertions(+), 2 deletions(-)

diff --git a/src/pytorch_lightning/trainer/connectors/accelerator_connector.py b/src/pytorch_lightning/trainer/connectors/accelerator_connector.py
index 9f87a68b4df7d..db4ad007ddec7 100644
--- a/src/pytorch_lightning/trainer/connectors/accelerator_connector.py
+++ b/src/pytorch_lightning/trainer/connectors/accelerator_connector.py
@@ -88,7 +88,7 @@
     _HPU_AVAILABLE,
     _IPU_AVAILABLE,
     _TORCH_GREATER_EQUAL_1_11,
-    _TPU_AVAILABLE,
+    _TPU_AVAILABLE, _IS_INTERACTIVE,
 )
 
 log = logging.getLogger(__name__)
@@ -588,7 +588,9 @@ def _choose_strategy(self) -> Union[Strategy, str]:
             # TODO: lazy initialized device, then here could be self._strategy_flag = "single_device"
             return SingleDeviceStrategy(device=device)  # type: ignore
         if len(self._parallel_devices) > 1:
-            return DDPSpawnStrategy.strategy_name
+            if _IS_INTERACTIVE:
+                return "ddp_fork"
+            return "ddp_spawn"
         return DDPStrategy.strategy_name
diff --git a/tests/tests_pytorch/accelerators/test_accelerator_connector.py b/tests/tests_pytorch/accelerators/test_accelerator_connector.py
index a04418b62ebd9..fefd47fe5cefa 100644
--- a/tests/tests_pytorch/accelerators/test_accelerator_connector.py
+++ b/tests/tests_pytorch/accelerators/test_accelerator_connector.py
@@ -424,6 +424,16 @@ def test_strategy_choice_ddp_spawn_cpu():
     assert trainer.strategy.launcher._start_method == "spawn"
 
 
+@RunIf(skip_windows=True)
+@mock.patch("pytorch_lightning.trainer.connectors.accelerator_connector._IS_INTERACTIVE", True)
+def test_strategy_choice_ddp_fork_in_interactive():
+    trainer = Trainer(strategy=None, devices=2)
+    assert isinstance(trainer.accelerator, CPUAccelerator)
+    assert isinstance(trainer.strategy, DDPSpawnStrategy)
+    assert isinstance(trainer.strategy.cluster_environment, LightningEnvironment)
+    assert trainer.strategy.launcher._start_method == "fork"
+
+
 @RunIf(skip_windows=True)
 def test_strategy_choice_ddp_fork_cpu():
     trainer = Trainer(strategy="ddp_fork", accelerator="cpu", devices=2)

From 141eeeafe10f97cf7bcc6297c10cc411879ad02b Mon Sep 17 00:00:00 2001
From: awaelchli
Date: Wed, 20 Jul 2022 03:09:36 +0200
Subject: [PATCH 2/3] update

---
 .../tests_pytorch/accelerators/test_accelerator_connector.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/tests/tests_pytorch/accelerators/test_accelerator_connector.py b/tests/tests_pytorch/accelerators/test_accelerator_connector.py
index fefd47fe5cefa..58ca7bd259255 100644
--- a/tests/tests_pytorch/accelerators/test_accelerator_connector.py
+++ b/tests/tests_pytorch/accelerators/test_accelerator_connector.py
@@ -427,7 +427,9 @@ def test_strategy_choice_ddp_spawn_cpu():
 @RunIf(skip_windows=True)
 @mock.patch("pytorch_lightning.trainer.connectors.accelerator_connector._IS_INTERACTIVE", True)
 def test_strategy_choice_ddp_fork_in_interactive():
-    trainer = Trainer(strategy=None, devices=2)
+    """Test that when accelerator and strategy are unspecified, the connector chooses DDP Fork in interactive
+    environments by default."""
+    trainer = Trainer(devices=2)
     assert isinstance(trainer.accelerator, CPUAccelerator)
     assert isinstance(trainer.strategy, DDPSpawnStrategy)
     assert isinstance(trainer.strategy.cluster_environment, LightningEnvironment)

From b14786ef3739b7fd62a300cef2ef93381aa23e43 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Wed, 20 Jul 2022 01:41:57 +0000
Subject: [PATCH 3/3] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 .../trainer/connectors/accelerator_connector.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/pytorch_lightning/trainer/connectors/accelerator_connector.py b/src/pytorch_lightning/trainer/connectors/accelerator_connector.py
index db4ad007ddec7..06e4c87d5a70d 100644
--- a/src/pytorch_lightning/trainer/connectors/accelerator_connector.py
+++ b/src/pytorch_lightning/trainer/connectors/accelerator_connector.py
@@ -87,8 +87,9 @@
     _HOROVOD_AVAILABLE,
     _HPU_AVAILABLE,
     _IPU_AVAILABLE,
+    _IS_INTERACTIVE,
     _TORCH_GREATER_EQUAL_1_11,
-    _TPU_AVAILABLE, _IS_INTERACTIVE,
+    _TPU_AVAILABLE,
 )
 
 log = logging.getLogger(__name__)
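
For context, a minimal Python sketch of the behaviour this series introduces, mirroring the assertions in the new test above. It assumes an interactive environment such as a Jupyter notebook (where `_IS_INTERACTIVE` evaluates to true) and a machine where two CPU devices can be requested; the snippet is illustrative only, not part of the patches.

    from pytorch_lightning import Trainer

    # With no explicit strategy, a multi-device Trainer created inside a notebook
    # now resolves to the fork-based DDP launcher ("ddp_fork") rather than
    # "ddp_spawn", since forked workers can be started from an interactive session.
    trainer = Trainer(accelerator="cpu", devices=2)
    assert trainer.strategy.launcher._start_method == "fork"  # same check as the new test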