2 changes: 1 addition & 1 deletion CHANGELOG.md
@@ -117,7 +117,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Don't raise a warning when `nn.Module` is not saved under hparams ([#12669](https://github.com/PyTorchLightning/pytorch-lightning/pull/12669))


--
+- Raise `MisconfigurationException` when the accelerator is available but the user passes invalid `([]/0/"0")` values to the `devices` flag ([#12708](https://github.com/PyTorchLightning/pytorch-lightning/pull/12708))


 ## [1.6.0] - 2022-03-29
11 changes: 11 additions & 0 deletions pytorch_lightning/trainer/connectors/accelerator_connector.py
@@ -413,6 +413,17 @@ def _check_device_config_and_set_final_flags(
         self._num_nodes_flag = int(num_nodes) if num_nodes is not None else 1
         self._devices_flag = devices

+        if self._devices_flag in ([], 0, "0"):
+            accelerator_name = (
+                self._accelerator_flag.__class__.__qualname__
+                if isinstance(self._accelerator_flag, Accelerator)
+                else self._accelerator_flag
+            )
+            raise MisconfigurationException(
+                f"`Trainer(devices={self._devices_flag!r})` value is not a valid input"
+                f" using {accelerator_name} accelerator."
+            )
+
         # TODO: Delete this method when num_processes, gpus, ipus and tpu_cores gets removed
         self._map_deprecated_devices_specific_info_to_accelerator_and_device_flag(
             devices, num_processes, gpus, ipus, tpu_cores
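To make the new guard concrete, here is a minimal sketch of the behavior it introduces (assumption: a local install of this branch; the exact message text comes from the `raise` in the hunk above):

from pytorch_lightning import Trainer
from pytorch_lightning.utilities.exceptions import MisconfigurationException

# Every "empty" devices value is now rejected up front, whatever the accelerator:
for devices in ([], 0, "0"):
    try:
        Trainer(accelerator="cpu", devices=devices)
    except MisconfigurationException as err:
        print(err)  # e.g. `Trainer(devices=0)` value is not a valid input using cpu accelerator.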
16 changes: 7 additions & 9 deletions tests/accelerators/test_accelerator_connector.py
@@ -504,15 +504,6 @@ def test_accelerator_cpu(_):
         trainer = Trainer(accelerator="cpu", gpus=1)


-@mock.patch("torch.cuda.is_available", return_value=False)
-@pytest.mark.parametrize("devices", ["0", 0, []])
-def test_passing_zero_and_empty_list_to_devices_flag(_, devices):
-    with pytest.raises(
-        MisconfigurationException, match="can not run on your system since the accelerator is not available."
-    ):
-        Trainer(accelerator="gpu", devices=devices)
-
-
 @RunIf(min_gpus=1)
 def test_accelerator_gpu():
     trainer = Trainer(accelerator="gpu", devices=1)
@@ -1014,3 +1005,10 @@ def __init__(self, **kwargs):
 def test_plugin_only_one_instance_for_one_type(plugins, expected):
     with pytest.raises(MisconfigurationException, match=f"Received multiple values for {expected}"):
         Trainer(plugins=plugins)
+
+
+@pytest.mark.parametrize("accelerator", ("cpu", "gpu", "tpu", "ipu"))
+@pytest.mark.parametrize("devices", ("0", 0, []))
+def test_passing_zero_and_empty_list_to_devices_flag(accelerator, devices):
+    with pytest.raises(MisconfigurationException, match="value is not a valid input using"):
+        Trainer(accelerator=accelerator, devices=devices)
2 changes: 1 addition & 1 deletion tests/accelerators/test_cpu.py
@@ -69,7 +69,7 @@ def load_checkpoint(self, checkpoint_path: Union[str, Path]) -> Dict[str, Any]:
         func(model, ckpt_path=checkpoint_path)


-@pytest.mark.parametrize("devices", ([3], -1, 0))
+@pytest.mark.parametrize("devices", ([3], -1))
 def test_invalid_devices_with_cpu_accelerator(devices):
     """Test invalid device flag raises MisconfigurationException with CPUAccelerator."""
     with pytest.raises(MisconfigurationException, match="should be an int > 0"):
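For context on the dropped `0` case above: it is now intercepted by the connector-level check before `CPUAccelerator`'s own count validation runs, while other invalid CPU device counts still produce the accelerator-specific error. A hedged sketch of the two error paths, assuming this branch is installed:

import pytest

from pytorch_lightning import Trainer
from pytorch_lightning.utilities.exceptions import MisconfigurationException

# devices=0 now hits the generic check in accelerator_connector.py:
with pytest.raises(MisconfigurationException, match="is not a valid input using"):
    Trainer(accelerator="cpu", devices=0)

# while -1 still hits CPUAccelerator's own device-count parsing:
with pytest.raises(MisconfigurationException, match="should be an int > 0"):
    Trainer(accelerator="cpu", devices=-1)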