3 changes: 3 additions & 0 deletions src/pytorch_lightning/CHANGELOG.md
@@ -70,6 +70,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Fixed dtype inference during gradient norm computation ([#14051](https://github.com/Lightning-AI/lightning/pull/14051))


- Fixed a bug that caused the DDP `find_unused_parameters` option to be set to `False` by default, whereas the intended default is `True` ([#14095](https://github.com/Lightning-AI/lightning/pull/14095))


## [1.7.0] - 2022-08-02

### Added
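For context on the changelog entry above, here is a minimal usage sketch (not part of the diff). `Trainer`, `DDPStrategy`, and the `find_unused_parameters` keyword are the existing Lightning/PyTorch names this PR concerns; the accelerator/device arguments are illustrative assumptions.

    # The plain "ddp" alias should keep Lightning's intended default of
    # find_unused_parameters=True rather than silently forcing it to False.
    from pytorch_lightning import Trainer
    from pytorch_lightning.strategies import DDPStrategy

    trainer = Trainer(strategy="ddp", accelerator="cpu", devices=2)

    # Opting out explicitly, either via the registered alias ...
    trainer = Trainer(strategy="ddp_find_unused_parameters_false", accelerator="cpu", devices=2)

    # ... or by passing the kwarg to the strategy directly.
    trainer = Trainer(strategy=DDPStrategy(find_unused_parameters=False), accelerator="cpu", devices=2)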
14 changes: 12 additions & 2 deletions src/pytorch_lightning/strategies/ddp_spawn.py
@@ -315,10 +315,20 @@ def post_training_step(self) -> None:
    def register_strategies(cls, strategy_registry: Dict) -> None:
        entries = (
            ("ddp_spawn", "spawn"),
            ("ddp_spawn_find_unused_parameters_false", "spawn"),
            ("ddp_fork", "fork"),
            ("ddp_fork_find_unused_parameters_false", "fork"),
            ("ddp_notebook", "fork"),
        )
        for name, start_method in entries:
            strategy_registry.register(
                name,
                cls,
                description=f"DDP strategy with `start_method` '{start_method}'",
                start_method=start_method,
            )

        entries = (
            ("ddp_spawn_find_unused_parameters_false", "spawn"),
            ("ddp_fork_find_unused_parameters_false", "fork"),
            ("ddp_notebook_find_unused_parameters_false", "fork"),
        )
        for name, start_method in entries:
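The body of the second registration loop is not shown in this hunk. As a hedged sketch, assuming the existing `StrategyRegistry.register` behaviour of storing extra keyword arguments and passing them to the strategy's constructor (the description wording below is an assumption), it presumably mirrors the first loop while also passing `find_unused_parameters=False`, which lands in the strategy's `_ddp_kwargs`:

        # Presumed continuation (not shown above): register the
        # *_find_unused_parameters_false aliases with the extra init kwarg.
        for name, start_method in entries:
            strategy_registry.register(
                name,
                cls,
                description=f"DDP strategy with `find_unused_parameters` as False and `start_method` '{start_method}'",
                find_unused_parameters=False,
                start_method=start_method,
            )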
12 changes: 12 additions & 0 deletions tests/tests_pytorch/strategies/test_ddp.py
@@ -194,3 +194,15 @@ def root_device(self):
                assert strategy._get_process_group_backend() == expected_process_group_backend
    else:
        assert strategy._get_process_group_backend() == expected_process_group_backend


@pytest.mark.parametrize(
    "strategy_name,expected_ddp_kwargs",
    [
        ("ddp", {}),
        ("ddp_find_unused_parameters_false", {"find_unused_parameters": False}),
    ],
)
def test_ddp_kwargs_from_registry(strategy_name, expected_ddp_kwargs):
    trainer = Trainer(strategy=strategy_name)
    assert trainer.strategy._ddp_kwargs == expected_ddp_kwargs
16 changes: 16 additions & 0 deletions tests/tests_pytorch/strategies/test_ddp_spawn_strategy.py
@@ -178,3 +178,19 @@ def test_ddp_spawn_strategy_set_timeout(mock_init_process_group):
    mock_init_process_group.assert_called_with(
        process_group_backend, rank=global_rank, world_size=world_size, timeout=test_timedelta
    )


@pytest.mark.parametrize(
    "strategy_name,expected_ddp_kwargs",
    [
        ("ddp_spawn", {}),
        ("ddp_fork", {}),
        ("ddp_notebook", {}),
        ("ddp_spawn_find_unused_parameters_false", {"find_unused_parameters": False}),
        ("ddp_fork_find_unused_parameters_false", {"find_unused_parameters": False}),
        ("ddp_notebook_find_unused_parameters_false", {"find_unused_parameters": False}),
    ],
)
def test_ddp_kwargs_from_registry(strategy_name, expected_ddp_kwargs):
    trainer = Trainer(strategy=strategy_name)
    assert trainer.strategy._ddp_kwargs == expected_ddp_kwargs
14 changes: 14 additions & 0 deletions tests/tests_pytorch/strategies/test_sharded_strategy.py
@@ -300,3 +300,17 @@ def test_block_backward_sync():
    with strategy.block_backward_sync():
        pass
    model.no_sync.assert_called_once()


@pytest.mark.parametrize(
    "strategy_name,expected_ddp_kwargs",
    [
        ("ddp_sharded", {}),
        ("ddp_sharded_find_unused_parameters_false", {"find_unused_parameters": False}),
        ("ddp_sharded_spawn", {}),
        ("ddp_sharded_spawn_find_unused_parameters_false", {"find_unused_parameters": False}),
    ],
)
def test_ddp_kwargs_from_registry(strategy_name, expected_ddp_kwargs):
    trainer = Trainer(strategy=strategy_name)
    assert trainer.strategy._ddp_kwargs == expected_ddp_kwargs
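These tests assert only on `_ddp_kwargs`. For completeness, a rough, simplified sketch (an assumption, not the library's exact code) of where those kwargs take effect: the strategy ultimately wraps the module with torch's `DistributedDataParallel` and forwards them.

    import torch
    from torch.nn.parallel import DistributedDataParallel


    def wrap_with_ddp(module: torch.nn.Module, ddp_kwargs: dict) -> DistributedDataParallel:
        # Any kwarg supplied at registration time (e.g. find_unused_parameters=False)
        # is forwarded to the DDP wrapper here.
        return DistributedDataParallel(module, **ddp_kwargs)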