The psutil package is now required for CPU monitoring (#17010)
(cherry picked from commit 8d586c6)
carmocca authored and lantiga committed Apr 3, 2023
1 parent 10774ca commit b238ef0
Showing 7 changed files with 17 additions and 21 deletions.
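The user-facing effect: CPU device stats are still opt-in through the `DeviceStatsMonitor` callback, but `psutil` must now be installed (`pip install -U psutil`) for them to be collected. A minimal usage sketch, not part of the diff below; the Trainer arguments and the model are illustrative placeholders:

from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import DeviceStatsMonitor

# Logs CPU utilisation stats (cpu_percent, cpu_vm_percent, cpu_swap_percent) to the
# attached logger; as of this commit, a missing `psutil` raises ModuleNotFoundError
# during setup instead of emitting a warning.
trainer = Trainer(
    accelerator="cpu",
    callbacks=[DeviceStatsMonitor()],
    max_epochs=1,
)
# trainer.fit(model)  # `model` is a placeholder LightningModule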
3 changes: 3 additions & 0 deletions src/pytorch_lightning/CHANGELOG.md
@@ -20,6 +20,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Fixed `num_nodes` not being set for `DDPFullyShardedNativeStrategy` ([#17160](https://github.com/Lightning-AI/lightning/pull/17160))


- The `psutil` package is now required for CPU monitoring ([#17010](https://github.com/Lightning-AI/lightning/pull/17010))


## [1.9.4] - 2023-03-01

### Added
6 changes: 3 additions & 3 deletions src/pytorch_lightning/accelerators/cpu.py
@@ -14,12 +14,12 @@
from typing import Any, Dict, List, Union

import torch
from lightning_utilities.core.imports import RequirementCache

from lightning_fabric.accelerators.cpu import _parse_cpu_cores
from lightning_fabric.utilities.types import _DEVICE
from pytorch_lightning.accelerators.accelerator import Accelerator
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.imports import _PSUTIL_AVAILABLE


class CPUAccelerator(Accelerator):
@@ -76,13 +76,13 @@ def register_accelerators(cls, accelerator_registry: Dict) -> None:
_CPU_VM_PERCENT = "cpu_vm_percent"
_CPU_PERCENT = "cpu_percent"
_CPU_SWAP_PERCENT = "cpu_swap_percent"
_PSUTIL_AVAILABLE = RequirementCache("psutil")


def get_cpu_stats() -> Dict[str, float]:
if not _PSUTIL_AVAILABLE:
raise ModuleNotFoundError(
"Fetching CPU device stats requires `psutil` to be installed."
" Install it by running `pip install -U psutil`."
f"Fetching CPU device stats requires `psutil` to be installed. {str(_PSUTIL_AVAILABLE)}"
)
import psutil

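The new module-level `_PSUTIL_AVAILABLE = RequirementCache("psutil")` is falsy when the requirement is not satisfied, and its string form carries the reason plus an install hint, which is why the new error message simply interpolates `str(_PSUTIL_AVAILABLE)`. A rough sketch of the guard pattern follows; the body of `get_cpu_stats` sits below the visible hunk, so the psutil calls here are assumptions based on the key names defined above:

from typing import Dict

from lightning_utilities.core.imports import RequirementCache

_PSUTIL_AVAILABLE = RequirementCache("psutil")  # truthy only if psutil is importable


def get_cpu_stats_sketch() -> Dict[str, float]:
    if not _PSUTIL_AVAILABLE:
        # str(_PSUTIL_AVAILABLE) explains why the requirement is unmet and how to install it
        raise ModuleNotFoundError(
            f"Fetching CPU device stats requires `psutil` to be installed. {str(_PSUTIL_AVAILABLE)}"
        )
    import psutil

    # Assumed stat collection, mirroring the key names in the hunk above
    return {
        "cpu_vm_percent": psutil.virtual_memory().percent,
        "cpu_percent": psutil.cpu_percent(),
        "cpu_swap_percent": psutil.swap_memory().percent,
    }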
5 changes: 2 additions & 3 deletions src/pytorch_lightning/accelerators/mps.py
@@ -19,8 +19,8 @@
from lightning_fabric.utilities.device_parser import _parse_gpu_ids
from lightning_fabric.utilities.types import _DEVICE
from pytorch_lightning.accelerators.accelerator import Accelerator
from pytorch_lightning.accelerators.cpu import _PSUTIL_AVAILABLE
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.imports import _PSUTIL_AVAILABLE


class MPSAccelerator(Accelerator):
@@ -84,8 +84,7 @@ def register_accelerators(cls, accelerator_registry: Dict) -> None:
def get_device_stats() -> Dict[str, float]:
if not _PSUTIL_AVAILABLE:
raise ModuleNotFoundError(
"Fetching M1 device stats requires `psutil` to be installed."
" Install it by running `pip install -U psutil`."
f"Fetching MPS device stats requires `psutil` to be installed. {str(_PSUTIL_AVAILABLE)}"
)
import psutil

17 changes: 6 additions & 11 deletions src/pytorch_lightning/callbacks/device_stats_monitor.py
@@ -21,10 +21,9 @@
from typing import Any, Dict, Optional

import pytorch_lightning as pl
from pytorch_lightning.accelerators.cpu import _PSUTIL_AVAILABLE
from pytorch_lightning.callbacks.callback import Callback
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.imports import _PSUTIL_AVAILABLE
from pytorch_lightning.utilities.rank_zero import rank_zero_warn
from pytorch_lightning.utilities.types import STEP_OUTPUT


@@ -36,14 +36,14 @@ class DeviceStatsMonitor(Callback):
Args:
cpu_stats: if ``None``, it will log CPU stats only if the accelerator is CPU.
It will raise a warning if ``psutil`` is not installed till v1.9.0.
If ``True``, it will log CPU stats regardless of the accelerator, and it will
raise an exception if ``psutil`` is not installed.
If ``True``, it will log CPU stats regardless of the accelerator.
If ``False``, it will not log CPU stats regardless of the accelerator.
Raises:
MisconfigurationException:
If ``Trainer`` has no logger.
ModuleNotFoundError:
If ``psutil`` is not installed and CPU stats are monitored.
Example:
>>> from pytorch_lightning import Trainer
Expand All @@ -70,13 +69,9 @@ def setup(
# warn in setup to warn once
device = trainer.strategy.root_device
if self._cpu_stats is None and device.type == "cpu" and not _PSUTIL_AVAILABLE:
# TODO: raise an exception from v1.9
rank_zero_warn(
"`DeviceStatsMonitor` will not log CPU stats as `psutil` is not installed."
" To install `psutil`, run `pip install psutil`."
" It will raise an exception if `psutil` is not installed post v1.9.0."
raise ModuleNotFoundError(
f"`DeviceStatsMonitor` cannot log CPU stats as `psutil` is not installed. {str(_PSUTIL_AVAILABLE)} "
)
self._cpu_stats = False

def _get_and_log_device_stats(self, trainer: "pl.Trainer", key: str) -> None:
if not trainer._logger_connector.should_update_logs:
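With this hunk, a CPU run without `psutil` fails fast in `setup()` instead of silently disabling CPU stats behind a warning. Users who explicitly do not want CPU stats (and therefore do not need `psutil`) can still opt out via the `cpu_stats` argument documented above. A small sketch, with the Trainer arguments as placeholders:

from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import DeviceStatsMonitor

# cpu_stats=False skips CPU monitoring entirely, so psutil is never required
monitor = DeviceStatsMonitor(cpu_stats=False)
trainer = Trainer(accelerator="gpu", devices=1, callbacks=[monitor])

# cpu_stats=True, or the default None on a CPU run, now requires psutil to be
# installed; otherwise a ModuleNotFoundError is raised.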
2 changes: 1 addition & 1 deletion src/pytorch_lightning/utilities/imports.py
@@ -31,7 +31,7 @@
_HIVEMIND_AVAILABLE = package_available("hivemind")
_KINETO_AVAILABLE = torch.profiler.kineto_available()
_OMEGACONF_AVAILABLE = package_available("omegaconf")
_PSUTIL_AVAILABLE = package_available("psutil")
_POPTORCH_AVAILABLE = package_available("poptorch")
_RICH_AVAILABLE = package_available("rich") and compare_version("rich", operator.ge, "10.2.2")
_TORCH_QUANTIZE_AVAILABLE = bool([eg for eg in torch.backends.quantized.supported_engines if eg != "none"])
_TORCHVISION_AVAILABLE = RequirementCache("torchvision")
3 changes: 1 addition & 2 deletions tests/tests_pytorch/callbacks/test_device_stats_monitor.py
@@ -165,8 +165,7 @@ def test_device_stats_monitor_warning_when_psutil_not_available(monkeypatch, tmp
monitor = DeviceStatsMonitor()
trainer = Trainer(logger=CSVLogger(tmp_path))
assert trainer.strategy.root_device == torch.device("cpu")
# TODO: raise an exception from v1.9
with pytest.warns(UserWarning, match="psutil` is not installed"):
with pytest.raises(ModuleNotFoundError, match="psutil` is not installed"):
monitor.setup(trainer, Mock(), "fit")


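The test mirrors the behavioural change: it now expects the hard failure rather than a `UserWarning`. The lines above the hunk are not shown, so the test name and the monkeypatching of the availability flag in this sketch are assumptions about how a missing `psutil` is simulated; the repository's actual test may differ:

from unittest.mock import Mock

import pytest
import torch

from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import DeviceStatsMonitor
from pytorch_lightning.loggers import CSVLogger


def test_device_stats_monitor_raise_when_psutil_not_available(monkeypatch, tmp_path):
    import pytorch_lightning.callbacks.device_stats_monitor as device_stats_monitor

    # Assumed setup: pretend psutil is unavailable by patching the flag the callback checks
    monkeypatch.setattr(device_stats_monitor, "_PSUTIL_AVAILABLE", False)
    monitor = DeviceStatsMonitor()
    trainer = Trainer(logger=CSVLogger(tmp_path))
    assert trainer.strategy.root_device == torch.device("cpu")
    with pytest.raises(ModuleNotFoundError, match="psutil` is not installed"):
        monitor.setup(trainer, Mock(), "fit")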
2 changes: 1 addition & 1 deletion tests/tests_pytorch/helpers/runif.py
@@ -22,6 +22,7 @@
from packaging.version import Version

from lightning_fabric.accelerators.cuda import num_cuda_devices
from pytorch_lightning.accelerators.cpu import _PSUTIL_AVAILABLE
from pytorch_lightning.accelerators.hpu import _HPU_AVAILABLE
from pytorch_lightning.accelerators.ipu import _IPU_AVAILABLE
from pytorch_lightning.accelerators.mps import MPSAccelerator
@@ -37,7 +38,6 @@
_HIVEMIND_AVAILABLE,
_OMEGACONF_AVAILABLE,
_ONNX_AVAILABLE,
_PSUTIL_AVAILABLE,
_TORCH_GREATER_EQUAL_2_0,
_TORCH_QUANTIZE_AVAILABLE,
)
