From 7b85d7747eabfae7b76c643c35775567d9e55f62 Mon Sep 17 00:00:00 2001 From: Luca Antiga Date: Fri, 18 Nov 2022 17:39:11 +0100 Subject: [PATCH 01/31] Switch from tensorboard to tensorboardx in logger --- requirements/pytorch/base.txt | 2 +- src/pytorch_lightning/loggers/tensorboard.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/pytorch/base.txt b/requirements/pytorch/base.txt index 5da61d3fe1acc..4e1ad6d4f942b 100644 --- a/requirements/pytorch/base.txt +++ b/requirements/pytorch/base.txt @@ -6,7 +6,7 @@ torch>=1.10.*, <=1.13.0 tqdm>=4.57.0, <4.65.0 PyYAML>=5.4, <=6.0 fsspec[http]>2021.06.0, <2022.8.0 -tensorboard>=2.9.1, <2.11.0 +tensorboardX>=2.0, <=2.5.1 torchmetrics>=0.7.0, <0.10.1 # needed for using fixed compare_version packaging>=17.0, <=21.3 typing-extensions>=4.0.0, <=4.4.0 diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index 50d6e95add25b..f29ce7fe6fd90 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -22,9 +22,9 @@ from typing import Any, Dict, Mapping, Optional, Union import numpy as np +from tensorboardX import SummaryWriter +from tensorboardX.summary import hparams from torch import Tensor -from torch.utils.tensorboard import SummaryWriter -from torch.utils.tensorboard.summary import hparams import pytorch_lightning as pl from lightning_lite.utilities.cloud_io import get_filesystem From cedf06e73db774758039a4f14fd346acdeeefb55 Mon Sep 17 00:00:00 2001 From: Luca Antiga Date: Fri, 18 Nov 2022 18:24:39 +0100 Subject: [PATCH 02/31] Warn if log_graph is set to True but tensorboard is not installed --- requirements/pytorch/test.txt | 1 + src/pytorch_lightning/loggers/tensorboard.py | 10 ++++++++-- src/pytorch_lightning/utilities/imports.py | 1 + 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/requirements/pytorch/test.txt b/requirements/pytorch/test.txt index 5ba99b269e002..55a8cebe4658a 100644 --- a/requirements/pytorch/test.txt +++ b/requirements/pytorch/test.txt @@ -14,3 +14,4 @@ psutil<5.9.4 # for `DeviceStatsMonitor` pandas>1.0, <1.5.2 # needed in benchmarks fastapi<0.87.0 uvicorn<0.19.1 +tensorboard>=2.9.1, <2.11.0 diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index f29ce7fe6fd90..ba2525bb15098 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -31,7 +31,7 @@ from lightning_lite.utilities.types import _PATH from pytorch_lightning.core.saving import save_hparams_to_yaml from pytorch_lightning.loggers.logger import Logger, rank_zero_experiment -from pytorch_lightning.utilities.imports import _OMEGACONF_AVAILABLE +from pytorch_lightning.utilities.imports import _OMEGACONF_AVAILABLE, _TENSORBOARD_AVAILABLE from pytorch_lightning.utilities.logger import _add_prefix, _convert_params, _flatten_dict from pytorch_lightning.utilities.logger import _sanitize_params as _utils_sanitize_params from pytorch_lightning.utilities.rank_zero import rank_zero_only, rank_zero_warn @@ -103,7 +103,13 @@ def __init__( self._name = name or "" self._version = version self._sub_dir = None if sub_dir is None else os.fspath(sub_dir) - self._log_graph = log_graph + self._log_graph = log_graph and _TENSORBOARD_AVAILABLE + + if log_graph and not _TENSORBOARD_AVAILABLE: + logging.warn( + "You have set log_graph=True but tensorboard is not available. Please pip install tensorboard." 
+ ) + self._default_hp_metric = default_hp_metric self._prefix = prefix self._fs = get_filesystem(save_dir) diff --git a/src/pytorch_lightning/utilities/imports.py b/src/pytorch_lightning/utilities/imports.py index 803b335fa2f2d..4231d628f3b13 100644 --- a/src/pytorch_lightning/utilities/imports.py +++ b/src/pytorch_lightning/utilities/imports.py @@ -39,6 +39,7 @@ _POPTORCH_AVAILABLE = package_available("poptorch") _PSUTIL_AVAILABLE = package_available("psutil") _RICH_AVAILABLE = package_available("rich") and compare_version("rich", operator.ge, "10.2.2") +_TENSORBOARD_AVAILABLE = package_available("tensorboard") _TORCH_QUANTIZE_AVAILABLE = bool([eg for eg in torch.backends.quantized.supported_engines if eg != "none"]) _TORCHVISION_AVAILABLE = RequirementCache("torchvision") From 9b67345cc0944539cc77c711ba340750455e362f Mon Sep 17 00:00:00 2001 From: William Falcon Date: Fri, 18 Nov 2022 18:32:34 +0100 Subject: [PATCH 03/31] Update tensorboard.py --- src/pytorch_lightning/loggers/tensorboard.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index ba2525bb15098..79a169723465b 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -106,9 +106,12 @@ def __init__( self._log_graph = log_graph and _TENSORBOARD_AVAILABLE if log_graph and not _TENSORBOARD_AVAILABLE: - logging.warn( - "You have set log_graph=True but tensorboard is not available. Please pip install tensorboard." - ) + m = '''You set log_graph=True but Tensorboard is not available. Please run this command to install it: + + pip install tensorboard + + ``` + logging.warn(m) self._default_hp_metric = default_hp_metric self._prefix = prefix From 57a5c3c57b49ae26d551771102d1160efc9def84 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 18 Nov 2022 17:35:29 +0000 Subject: [PATCH 04/31] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/pytorch_lightning/loggers/tensorboard.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index 79a169723465b..0352dcbd231d6 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -107,7 +107,7 @@ def __init__( if log_graph and not _TENSORBOARD_AVAILABLE: m = '''You set log_graph=True but Tensorboard is not available. 
Please run this command to install it: - + pip install tensorboard ``` From 99d3f1a9b722b62665da15f12c008ab228e3565e Mon Sep 17 00:00:00 2001 From: Luca Antiga Date: Fri, 18 Nov 2022 18:39:30 +0100 Subject: [PATCH 05/31] Update src/pytorch_lightning/loggers/tensorboard.py --- src/pytorch_lightning/loggers/tensorboard.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index 0352dcbd231d6..827794ad88f9e 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -110,7 +110,7 @@ def __init__( pip install tensorboard - ``` + ''' logging.warn(m) self._default_hp_metric = default_hp_metric From 1cfde33f9163ede5b51de03f934ab38975733d66 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 18 Nov 2022 17:41:01 +0000 Subject: [PATCH 06/31] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/pytorch_lightning/loggers/tensorboard.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index 827794ad88f9e..a626a64a950bc 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -106,11 +106,11 @@ def __init__( self._log_graph = log_graph and _TENSORBOARD_AVAILABLE if log_graph and not _TENSORBOARD_AVAILABLE: - m = '''You set log_graph=True but Tensorboard is not available. Please run this command to install it: + m = """You set log_graph=True but Tensorboard is not available. Please run this command to install it: pip install tensorboard - ''' + """ logging.warn(m) self._default_hp_metric = default_hp_metric From df72585aa5da78cc7def89dfbad1e8bd429e62b2 Mon Sep 17 00:00:00 2001 From: Luca Antiga Date: Fri, 18 Nov 2022 20:09:44 +0100 Subject: [PATCH 07/31] Update src/pytorch_lightning/loggers/tensorboard.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Carlos MocholĂ­ --- src/pytorch_lightning/loggers/tensorboard.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index a626a64a950bc..807c48419bcdf 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -111,7 +111,7 @@ def __init__( pip install tensorboard """ - logging.warn(m) + rank_zero_warn(m) self._default_hp_metric = default_hp_metric self._prefix = prefix From df0982ac04f27d7590482d2683dacb4428afc0d1 Mon Sep 17 00:00:00 2001 From: Luca Antiga Date: Fri, 18 Nov 2022 20:15:52 +0100 Subject: [PATCH 08/31] Fix warning message formatting --- src/pytorch_lightning/loggers/tensorboard.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index 807c48419bcdf..00e034964f8a1 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -106,12 +106,10 @@ def __init__( self._log_graph = log_graph and _TENSORBOARD_AVAILABLE if log_graph and not _TENSORBOARD_AVAILABLE: - m = """You set log_graph=True but Tensorboard is not available. 
Please run this command to install it: - - pip install tensorboard - - """ - rank_zero_warn(m) + rank_zero_warn( + "You set log_graph=True but TensorBoard is not available. " + "Please run this command to install it: pip install tensorboard." + ) self._default_hp_metric = default_hp_metric self._prefix = prefix From 505246484d1b5fee81f5d2f55242cb42310c723e Mon Sep 17 00:00:00 2001 From: Luca Antiga Date: Fri, 18 Nov 2022 20:21:21 +0100 Subject: [PATCH 09/31] Import tensorboard if available --- src/pytorch_lightning/loggers/tensorboard.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index 00e034964f8a1..5c0f65b023b55 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -22,8 +22,6 @@ from typing import Any, Dict, Mapping, Optional, Union import numpy as np -from tensorboardX import SummaryWriter -from tensorboardX.summary import hparams from torch import Tensor import pytorch_lightning as pl @@ -36,6 +34,13 @@ from pytorch_lightning.utilities.logger import _sanitize_params as _utils_sanitize_params from pytorch_lightning.utilities.rank_zero import rank_zero_only, rank_zero_warn +if _TENSORBOARD_AVAILABLE: + from torch.utils.tensorboard import SummaryWriter + from torch.utils.tensorboard.summary import hparams +else: + from tensorboardX import SummaryWriter + from tensorboardX.summary import hparams + log = logging.getLogger(__name__) if _OMEGACONF_AVAILABLE: From f123b68b1b60e155817df7d38f9c024ec9ae1400 Mon Sep 17 00:00:00 2001 From: Luca Antiga Date: Fri, 18 Nov 2022 20:55:39 +0100 Subject: [PATCH 10/31] Ignore mypy error --- src/pytorch_lightning/loggers/tensorboard.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index 5c0f65b023b55..1d929f356f9d5 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -38,7 +38,10 @@ from torch.utils.tensorboard import SummaryWriter from torch.utils.tensorboard.summary import hparams else: + # type: ignore[no-redef] from tensorboardX import SummaryWriter + + # type: ignore[no-redef] from tensorboardX.summary import hparams log = logging.getLogger(__name__) From f68f7528868fd84a5f6e871bbc7eb53df6cbe6f3 Mon Sep 17 00:00:00 2001 From: Jirka Borovec <6035284+Borda@users.noreply.github.com> Date: Fri, 18 Nov 2022 20:23:47 +0100 Subject: [PATCH 11/31] Apply suggestions from code review --- src/pytorch_lightning/loggers/tensorboard.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index 1d929f356f9d5..5e117f8426b30 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -115,8 +115,8 @@ def __init__( if log_graph and not _TENSORBOARD_AVAILABLE: rank_zero_warn( - "You set log_graph=True but TensorBoard is not available. " - "Please run this command to install it: pip install tensorboard." + "You set `log_graph=True`, but TensorBoard is not available." 
+ " Please install missing dependency `pip install tensorboard`" ) self._default_hp_metric = default_hp_metric From 31f79bff788c69c86d68cf3757ec7735e8527a58 Mon Sep 17 00:00:00 2001 From: Luca Antiga Date: Fri, 18 Nov 2022 20:27:35 +0100 Subject: [PATCH 12/31] Update src/pytorch_lightning/loggers/tensorboard.py --- src/pytorch_lightning/loggers/tensorboard.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index 5e117f8426b30..794b95a973313 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -116,7 +116,7 @@ def __init__( if log_graph and not _TENSORBOARD_AVAILABLE: rank_zero_warn( "You set `log_graph=True`, but TensorBoard is not available." - " Please install missing dependency `pip install tensorboard`" + " Please install TensorBoard by running `pip install tensorboard`" ) self._default_hp_metric = default_hp_metric From ff5aeeac4b3a89c0eec41efea071c65a4f00d5bf Mon Sep 17 00:00:00 2001 From: Luca Antiga Date: Sun, 20 Nov 2022 17:48:17 +0100 Subject: [PATCH 13/31] Fix type ignore comments --- src/pytorch_lightning/loggers/tensorboard.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index 794b95a973313..523c7d8778011 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -38,11 +38,8 @@ from torch.utils.tensorboard import SummaryWriter from torch.utils.tensorboard.summary import hparams else: - # type: ignore[no-redef] - from tensorboardX import SummaryWriter - - # type: ignore[no-redef] - from tensorboardX.summary import hparams + from tensorboardX import SummaryWriter # type: ignore [no-redef] + from tensorboardX.summary import hparams # type: ignore [no-redef] log = logging.getLogger(__name__) From 0a66901800a901d787b35442503ab06460d5e149 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20Mochol=C3=AD?= Date: Fri, 18 Nov 2022 20:25:59 +0100 Subject: [PATCH 14/31] Implement suggestions --- src/pytorch_lightning/loggers/tensorboard.py | 59 +++++++++++++------ src/pytorch_lightning/utilities/imports.py | 1 - .../tests_pytorch/loggers/test_tensorboard.py | 13 ++-- 3 files changed, 50 insertions(+), 23 deletions(-) diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index 523c7d8778011..016cc82af7aef 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -19,9 +19,10 @@ import logging import os from argparse import Namespace -from typing import Any, Dict, Mapping, Optional, Union +from typing import Any, Dict, Mapping, Optional, TYPE_CHECKING, Union import numpy as np +from lightning_utilities.core.imports import RequirementCache from torch import Tensor import pytorch_lightning as pl @@ -29,17 +30,21 @@ from lightning_lite.utilities.types import _PATH from pytorch_lightning.core.saving import save_hparams_to_yaml from pytorch_lightning.loggers.logger import Logger, rank_zero_experiment -from pytorch_lightning.utilities.imports import _OMEGACONF_AVAILABLE, _TENSORBOARD_AVAILABLE +from pytorch_lightning.utilities.imports import _OMEGACONF_AVAILABLE from pytorch_lightning.utilities.logger import _add_prefix, _convert_params, _flatten_dict from pytorch_lightning.utilities.logger import _sanitize_params as _utils_sanitize_params from 
pytorch_lightning.utilities.rank_zero import rank_zero_only, rank_zero_warn -if _TENSORBOARD_AVAILABLE: - from torch.utils.tensorboard import SummaryWriter - from torch.utils.tensorboard.summary import hparams +_TENSORBOARD_AVAILABLE = RequirementCache("tensorboard>=2.9.1") +_TENSORBOARDX_AVAILABLE = RequirementCache("tensorboardX>=2.0") +if TYPE_CHECKING: + # assumes at least one will be installed when type checking + if _TENSORBOARD_AVAILABLE: + from torch.utils.tensorboard import SummaryWriter + else: + from tensorboardX import SummaryWriter # type: ignore[no-redef] else: - from tensorboardX import SummaryWriter # type: ignore [no-redef] - from tensorboardX.summary import hparams # type: ignore [no-redef] + SummaryWriter = Any log = logging.getLogger(__name__) @@ -51,9 +56,9 @@ class TensorBoardLogger(Logger): r""" Log to local file system in `TensorBoard `_ format. - Implemented using :class:`~torch.utils.tensorboard.SummaryWriter`. Logs are saved to - ``os.path.join(save_dir, name, version)``. This is the default logger in Lightning, it comes - preinstalled. + Implemented using :class:`torch.utils.tensorboard.SummaryWriter` if ``tensorboard`` is available. Otherwise, it uses + the ``tensorboardX`` implementation. Logs are saved to ``os.path.join(save_dir, name, version)``. This is the + default logger in Lightning, it comes preinstalled. Example: @@ -82,7 +87,7 @@ class TensorBoardLogger(Logger): sub_dir: Sub-directory to group TensorBoard logs. If a sub_dir argument is passed then logs are saved in ``/save_dir/name/version/sub_dir/``. Defaults to ``None`` in which logs are saved in ``/save_dir/name/version/``. - \**kwargs: Additional arguments used by :class:`SummaryWriter` can be passed as keyword + \**kwargs: Additional arguments used by :class:`torch.utils.tensorboard.SummaryWriter` can be passed as keyword arguments in this logger. To automatically flush to disk, `max_queue` sets the size of the queue for pending logs before flushing. `flush_secs` determines how many seconds elapses before flushing. @@ -102,6 +107,9 @@ def __init__( sub_dir: Optional[_PATH] = None, **kwargs: Any, ): + if not _TENSORBOARD_AVAILABLE and not _TENSORBOARDX_AVAILABLE: + raise ModuleNotFoundError(f"{_TENSORBOARD_AVAILABLE!s}. You can also install `tensorboardX` if you prefer.") + super().__init__() save_dir = os.fspath(save_dir) self._save_dir = save_dir @@ -109,12 +117,17 @@ def __init__( self._version = version self._sub_dir = None if sub_dir is None else os.fspath(sub_dir) self._log_graph = log_graph and _TENSORBOARD_AVAILABLE - if log_graph and not _TENSORBOARD_AVAILABLE: - rank_zero_warn( - "You set `log_graph=True`, but TensorBoard is not available." - " Please install TensorBoard by running `pip install tensorboard`" - ) + if _TENSORBOARDX_AVAILABLE: + rank_zero_warn( + "You set `TensorBoardLogger(log_graph=True)` but `tensorboard` is not available. `tensorboardX` is" + " installed but it does not support this feature." + ) + else: + rank_zero_warn( + "You set `TensorBoardLogger(log_graph=True)` but `tensorboard` is not available. " + + str(_TENSORBOARDX_AVAILABLE) + ) self._default_hp_metric = default_hp_metric self._prefix = prefix @@ -169,7 +182,7 @@ def sub_dir(self) -> Optional[str]: @property @rank_zero_experiment - def experiment(self) -> SummaryWriter: + def experiment(self) -> "SummaryWriter": r""" Actual tensorboard object. To use TensorBoard features in your :class:`~pytorch_lightning.core.module.LightningModule` do the following. 
@@ -185,6 +198,12 @@ def experiment(self) -> SummaryWriter: assert rank_zero_only.rank == 0, "tried to init log dirs in non global_rank=0" if self.root_dir: self._fs.makedirs(self.root_dir, exist_ok=True) + + if _TENSORBOARD_AVAILABLE: + from torch.utils.tensorboard import SummaryWriter + else: + from tensorboardX import SummaryWriter # type: ignore[no-redef] + self._experiment = SummaryWriter(log_dir=self.log_dir, **self._kwargs) return self._experiment @@ -221,6 +240,12 @@ def log_hyperparams( if metrics: self.log_metrics(metrics, 0) + + if _TENSORBOARD_AVAILABLE: + from torch.utils.tensorboard.summary import hparams + else: + from tensorboardX.summary import hparams # type: ignore[no-redef] + exp, ssi, sei = hparams(params, metrics) writer = self.experiment._get_file_writer() writer.add_summary(exp) diff --git a/src/pytorch_lightning/utilities/imports.py b/src/pytorch_lightning/utilities/imports.py index 4231d628f3b13..803b335fa2f2d 100644 --- a/src/pytorch_lightning/utilities/imports.py +++ b/src/pytorch_lightning/utilities/imports.py @@ -39,7 +39,6 @@ _POPTORCH_AVAILABLE = package_available("poptorch") _PSUTIL_AVAILABLE = package_available("psutil") _RICH_AVAILABLE = package_available("rich") and compare_version("rich", operator.ge, "10.2.2") -_TENSORBOARD_AVAILABLE = package_available("tensorboard") _TORCH_QUANTIZE_AVAILABLE = bool([eg for eg in torch.backends.quantized.supported_engines if eg != "none"]) _TORCHVISION_AVAILABLE = RequirementCache("torchvision") diff --git a/tests/tests_pytorch/loggers/test_tensorboard.py b/tests/tests_pytorch/loggers/test_tensorboard.py index 90d15c06d7bf1..fa682290b5c41 100644 --- a/tests/tests_pytorch/loggers/test_tensorboard.py +++ b/tests/tests_pytorch/loggers/test_tensorboard.py @@ -15,6 +15,7 @@ import os from argparse import Namespace from unittest import mock +from unittest.mock import Mock import numpy as np import pytest @@ -276,23 +277,25 @@ def training_step(self, *args): assert count_steps == model.indexes -@mock.patch("pytorch_lightning.loggers.tensorboard.SummaryWriter") -def test_tensorboard_finalize(summary_writer, tmpdir): +def test_tensorboard_finalize(monkeypatch, tmpdir): """Test that the SummaryWriter closes in finalize.""" + import torch.utils.tensorboard as tb + + monkeypatch.setattr(tb, "SummaryWriter", Mock()) logger = TensorBoardLogger(save_dir=tmpdir) assert logger._experiment is None logger.finalize("any") # no log calls, no experiment created -> nothing to flush - summary_writer.assert_not_called() + logger.experiment.assert_not_called() logger = TensorBoardLogger(save_dir=tmpdir) logger.log_metrics({"flush_me": 11.1}) # trigger creation of an experiment logger.finalize("any") # finalize flushes to experiment directory - summary_writer().flush.assert_called() - summary_writer().close.assert_called() + logger.experiment.flush.assert_called() + logger.experiment.close.assert_called() def test_tensorboard_save_hparams_to_yaml_once(tmpdir): From 37f99706b321ac94dc10d8204026125a8e28a4f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20Mochol=C3=AD?= Date: Mon, 21 Nov 2022 13:41:41 +0100 Subject: [PATCH 15/31] Fix tests when only tensorboardX is available --- tests/tests_pytorch/conftest.py | 1 + tests/tests_pytorch/loggers/test_all.py | 42 +++++++++++-------- .../tests_pytorch/loggers/test_tensorboard.py | 10 ++++- 3 files changed, 34 insertions(+), 19 deletions(-) diff --git a/tests/tests_pytorch/conftest.py b/tests/tests_pytorch/conftest.py index 2f5607828a232..a4ddd88a39ae5 100644 --- a/tests/tests_pytorch/conftest.py +++ 
b/tests/tests_pytorch/conftest.py @@ -75,6 +75,7 @@ def restore_env_variables(): "CUDA_MODULE_LOADING", # leaked since PyTorch 1.13 "KMP_INIT_AT_FORK", # leaked since PyTorch 1.13 "KMP_DUPLICATE_LIB_OK", # leaked since PyTorch 1.13 + "CRC32C_SW_MODE", # leaked by tensorboardX } leaked_vars.difference_update(allowlist) assert not leaked_vars, f"test is leaking environment variable(s): {set(leaked_vars)}" diff --git a/tests/tests_pytorch/loggers/test_all.py b/tests/tests_pytorch/loggers/test_all.py index 4244e984553ff..445404acc87bc 100644 --- a/tests/tests_pytorch/loggers/test_all.py +++ b/tests/tests_pytorch/loggers/test_all.py @@ -15,22 +15,16 @@ import inspect import pickle from unittest import mock -from unittest.mock import ANY +from unittest.mock import ANY, Mock import pytest import torch from pytorch_lightning import Callback, Trainer from pytorch_lightning.demos.boring_classes import BoringModel -from pytorch_lightning.loggers import ( - CometLogger, - CSVLogger, - MLFlowLogger, - NeptuneLogger, - TensorBoardLogger, - WandbLogger, -) +from pytorch_lightning.loggers import CometLogger, CSVLogger, MLFlowLogger, NeptuneLogger, WandbLogger from pytorch_lightning.loggers.logger import DummyExperiment +from pytorch_lightning.loggers.tensorboard import _TENSORBOARD_AVAILABLE, _TENSORBOARDX_AVAILABLE, TensorBoardLogger from tests_pytorch.helpers.runif import RunIf from tests_pytorch.loggers.test_comet import _patch_comet_atexit from tests_pytorch.loggers.test_mlflow import mock_mlflow_run_creation @@ -300,10 +294,17 @@ def test_logger_with_prefix_all(tmpdir, monkeypatch): logger.experiment.__getitem__().log.assert_called_once_with(1.0) # TensorBoard - with mock.patch("pytorch_lightning.loggers.tensorboard.SummaryWriter"): - logger = _instantiate_logger(TensorBoardLogger, save_dir=tmpdir, prefix=prefix) - logger.log_metrics({"test": 1.0}, step=0) - logger.experiment.add_scalar.assert_called_once_with("tmp-test", 1.0, 0) + if _TENSORBOARD_AVAILABLE: + import torch.utils.tensorboard as tb + elif not _TENSORBOARDX_AVAILABLE: + import tensorboardX as tb + else: + pytest.skip("`tensorboard` not installed.") + + monkeypatch.setattr(tb, "SummaryWriter", Mock()) + logger = _instantiate_logger(TensorBoardLogger, save_dir=tmpdir, prefix=prefix) + logger.log_metrics({"test": 1.0}, step=0) + logger.experiment.add_scalar.assert_called_once_with("tmp-test", 1.0, 0) # WandB with mock.patch("pytorch_lightning.loggers.wandb.wandb") as wandb, mock.patch( @@ -316,7 +317,7 @@ def test_logger_with_prefix_all(tmpdir, monkeypatch): logger.experiment.log.assert_called_once_with({"tmp-test": 1.0, "trainer/global_step": 0}) -def test_logger_default_name(tmpdir): +def test_logger_default_name(tmpdir, monkeypatch): """Test that the default logger name is lightning_logs.""" # CSV @@ -324,9 +325,16 @@ def test_logger_default_name(tmpdir): assert logger.name == "lightning_logs" # TensorBoard - with mock.patch("pytorch_lightning.loggers.tensorboard.SummaryWriter"): - logger = _instantiate_logger(TensorBoardLogger, save_dir=tmpdir) - assert logger.name == "lightning_logs" + if _TENSORBOARD_AVAILABLE: + import torch.utils.tensorboard as tb + elif not _TENSORBOARDX_AVAILABLE: + import tensorboardX as tb + else: + pytest.skip("`tensorboard` not installed.") + + monkeypatch.setattr(tb, "SummaryWriter", Mock()) + logger = _instantiate_logger(TensorBoardLogger, save_dir=tmpdir) + assert logger.name == "lightning_logs" # MLflow with mock.patch("pytorch_lightning.loggers.mlflow.mlflow"), mock.patch( diff --git 
a/tests/tests_pytorch/loggers/test_tensorboard.py b/tests/tests_pytorch/loggers/test_tensorboard.py index fa682290b5c41..243cde33ed562 100644 --- a/tests/tests_pytorch/loggers/test_tensorboard.py +++ b/tests/tests_pytorch/loggers/test_tensorboard.py @@ -24,7 +24,7 @@ from pytorch_lightning import Trainer from pytorch_lightning.demos.boring_classes import BoringModel -from pytorch_lightning.loggers import TensorBoardLogger +from pytorch_lightning.loggers.tensorboard import _TENSORBOARD_AVAILABLE, _TENSORBOARDX_AVAILABLE, TensorBoardLogger from pytorch_lightning.utilities.imports import _OMEGACONF_AVAILABLE from tests_pytorch.helpers.runif import RunIf @@ -221,6 +221,7 @@ def test_tensorboard_log_graph(tmpdir, example_input_array): logger.log_graph(model, example_input_array) +@pytest.mark.skipif(not _TENSORBOARD_AVAILABLE, reason=str(_TENSORBOARD_AVAILABLE)) def test_tensorboard_log_graph_warning_no_example_input_array(tmpdir): """test that log graph throws warning if model.example_input_array is None.""" model = BoringModel() @@ -279,7 +280,12 @@ def training_step(self, *args): def test_tensorboard_finalize(monkeypatch, tmpdir): """Test that the SummaryWriter closes in finalize.""" - import torch.utils.tensorboard as tb + if _TENSORBOARD_AVAILABLE: + import torch.utils.tensorboard as tb + elif not _TENSORBOARDX_AVAILABLE: + import tensorboardX as tb + else: + pytest.skip("`tensorboard` not installed.") monkeypatch.setattr(tb, "SummaryWriter", Mock()) logger = TensorBoardLogger(save_dir=tmpdir) From fcbaea378fc8d889d0d98105bb7545c3a3c1e444 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20Mochol=C3=AD?= Date: Mon, 21 Nov 2022 14:04:31 +0100 Subject: [PATCH 16/31] Fix CLI test --- tests/tests_pytorch/test_cli.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/tests_pytorch/test_cli.py b/tests/tests_pytorch/test_cli.py index 5e864cea3568d..79562e52e3fea 100644 --- a/tests/tests_pytorch/test_cli.py +++ b/tests/tests_pytorch/test_cli.py @@ -1330,7 +1330,9 @@ def test_tensorboard_logger_init_args(): "TensorBoardLogger", { "save_dir": "tb", # Resolve from TensorBoardLogger.__init__ - "comment": "tb", # Resolve from tensorboard.writer.SummaryWriter.__init__ + }, + { + "comment": "tb", # Unsupported resolving from local imports }, ) From c02d14e9e897c445b56428bd58324e6505b3d0df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20Mochol=C3=AD?= Date: Mon, 21 Nov 2022 16:37:50 +0100 Subject: [PATCH 17/31] Fix merge --- requirements/pytorch/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/pytorch/test.txt b/requirements/pytorch/test.txt index 55a8cebe4658a..c7308ae312b5a 100644 --- a/requirements/pytorch/test.txt +++ b/requirements/pytorch/test.txt @@ -14,4 +14,4 @@ psutil<5.9.4 # for `DeviceStatsMonitor` pandas>1.0, <1.5.2 # needed in benchmarks fastapi<0.87.0 uvicorn<0.19.1 -tensorboard>=2.9.1, <2.11.0 +tensorboard>=2.9.1, <2.12.0 From 0e4552f467873361e06b61cb1832d1fc4b598e0e Mon Sep 17 00:00:00 2001 From: Jirka Borovec <6035284+Borda@users.noreply.github.com> Date: Mon, 21 Nov 2022 17:25:27 +0100 Subject: [PATCH 18/31] Apply suggestions from code review --- src/pytorch_lightning/loggers/tensorboard.py | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index 016cc82af7aef..6c2693339c450 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -118,16 +118,8 @@ def 
__init__( self._sub_dir = None if sub_dir is None else os.fspath(sub_dir) self._log_graph = log_graph and _TENSORBOARD_AVAILABLE if log_graph and not _TENSORBOARD_AVAILABLE: - if _TENSORBOARDX_AVAILABLE: - rank_zero_warn( - "You set `TensorBoardLogger(log_graph=True)` but `tensorboard` is not available. `tensorboardX` is" - " installed but it does not support this feature." - ) - else: - rank_zero_warn( - "You set `TensorBoardLogger(log_graph=True)` but `tensorboard` is not available. " - + str(_TENSORBOARDX_AVAILABLE) - ) + rank_zero_warn( + "You set `TensorBoardLogger(log_graph=True)` but `tensorboard` is not available.") self._default_hp_metric = default_hp_metric self._prefix = prefix From d9db264a111e12e7c81b2e884dade22e3bcd05ad Mon Sep 17 00:00:00 2001 From: Jirka Borovec <6035284+Borda@users.noreply.github.com> Date: Mon, 21 Nov 2022 17:25:38 +0100 Subject: [PATCH 19/31] Apply suggestions from code review --- src/pytorch_lightning/loggers/tensorboard.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index 6c2693339c450..eb0cd9787ef20 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -107,7 +107,7 @@ def __init__( sub_dir: Optional[_PATH] = None, **kwargs: Any, ): - if not _TENSORBOARD_AVAILABLE and not _TENSORBOARDX_AVAILABLE: + if not _TENSORBOARD_AVAILABLE: raise ModuleNotFoundError(f"{_TENSORBOARD_AVAILABLE!s}. You can also install `tensorboardX` if you prefer.") super().__init__() From 4576ef3c05a694d74083950464371ff45095b23b Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 21 Nov 2022 16:28:50 +0000 Subject: [PATCH 20/31] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/pytorch_lightning/loggers/tensorboard.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index eb0cd9787ef20..1482b96434f08 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -118,8 +118,7 @@ def __init__( self._sub_dir = None if sub_dir is None else os.fspath(sub_dir) self._log_graph = log_graph and _TENSORBOARD_AVAILABLE if log_graph and not _TENSORBOARD_AVAILABLE: - rank_zero_warn( - "You set `TensorBoardLogger(log_graph=True)` but `tensorboard` is not available.") + rank_zero_warn("You set `TensorBoardLogger(log_graph=True)` but `tensorboard` is not available.") self._default_hp_metric = default_hp_metric self._prefix = prefix From 105da79e61081042938a3e699a4323cb6403c1d6 Mon Sep 17 00:00:00 2001 From: Jirka Date: Mon, 21 Nov 2022 18:05:12 +0100 Subject: [PATCH 21/31] simplify for TBX as required pkg --- src/pytorch_lightning/loggers/tensorboard.py | 42 +++++-------------- tests/tests_pytorch/loggers/test_all.py | 10 ++--- .../tests_pytorch/loggers/test_tensorboard.py | 6 +-- 3 files changed, 15 insertions(+), 43 deletions(-) diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index 1482b96434f08..c6e385a67abf3 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -19,10 +19,12 @@ import logging import os from argparse import Namespace -from typing import Any, Dict, Mapping, Optional, TYPE_CHECKING, Union +from typing import Any, 
Dict, Mapping, Optional, Union import numpy as np from lightning_utilities.core.imports import RequirementCache +from tensorboardX import SummaryWriter +from tensorboardX.summary import hparams from torch import Tensor import pytorch_lightning as pl @@ -35,19 +37,10 @@ from pytorch_lightning.utilities.logger import _sanitize_params as _utils_sanitize_params from pytorch_lightning.utilities.rank_zero import rank_zero_only, rank_zero_warn -_TENSORBOARD_AVAILABLE = RequirementCache("tensorboard>=2.9.1") -_TENSORBOARDX_AVAILABLE = RequirementCache("tensorboardX>=2.0") -if TYPE_CHECKING: - # assumes at least one will be installed when type checking - if _TENSORBOARD_AVAILABLE: - from torch.utils.tensorboard import SummaryWriter - else: - from tensorboardX import SummaryWriter # type: ignore[no-redef] -else: - SummaryWriter = Any - log = logging.getLogger(__name__) +_TENSORBOARD_AVAILABLE = RequirementCache("tensorboard") + if _OMEGACONF_AVAILABLE: from omegaconf import Container, OmegaConf @@ -56,9 +49,9 @@ class TensorBoardLogger(Logger): r""" Log to local file system in `TensorBoard `_ format. - Implemented using :class:`torch.utils.tensorboard.SummaryWriter` if ``tensorboard`` is available. Otherwise, it uses - the ``tensorboardX`` implementation. Logs are saved to ``os.path.join(save_dir, name, version)``. This is the - default logger in Lightning, it comes preinstalled. + Implemented using :class:`~tensorboardX.SummaryWriter`. Logs are saved to + ``os.path.join(save_dir, name, version)``. This is the default logger in Lightning, it comes + preinstalled. Example: @@ -87,7 +80,7 @@ class TensorBoardLogger(Logger): sub_dir: Sub-directory to group TensorBoard logs. If a sub_dir argument is passed then logs are saved in ``/save_dir/name/version/sub_dir/``. Defaults to ``None`` in which logs are saved in ``/save_dir/name/version/``. - \**kwargs: Additional arguments used by :class:`torch.utils.tensorboard.SummaryWriter` can be passed as keyword + \**kwargs: Additional arguments used by :class:`tensorboardX.SummaryWriter` can be passed as keyword arguments in this logger. To automatically flush to disk, `max_queue` sets the size of the queue for pending logs before flushing. `flush_secs` determines how many seconds elapses before flushing. @@ -107,9 +100,6 @@ def __init__( sub_dir: Optional[_PATH] = None, **kwargs: Any, ): - if not _TENSORBOARD_AVAILABLE: - raise ModuleNotFoundError(f"{_TENSORBOARD_AVAILABLE!s}. You can also install `tensorboardX` if you prefer.") - super().__init__() save_dir = os.fspath(save_dir) self._save_dir = save_dir @@ -173,7 +163,7 @@ def sub_dir(self) -> Optional[str]: @property @rank_zero_experiment - def experiment(self) -> "SummaryWriter": + def experiment(self) -> SummaryWriter: r""" Actual tensorboard object. To use TensorBoard features in your :class:`~pytorch_lightning.core.module.LightningModule` do the following. 
@@ -189,12 +179,6 @@ def experiment(self) -> "SummaryWriter": assert rank_zero_only.rank == 0, "tried to init log dirs in non global_rank=0" if self.root_dir: self._fs.makedirs(self.root_dir, exist_ok=True) - - if _TENSORBOARD_AVAILABLE: - from torch.utils.tensorboard import SummaryWriter - else: - from tensorboardX import SummaryWriter # type: ignore[no-redef] - self._experiment = SummaryWriter(log_dir=self.log_dir, **self._kwargs) return self._experiment @@ -231,12 +215,6 @@ def log_hyperparams( if metrics: self.log_metrics(metrics, 0) - - if _TENSORBOARD_AVAILABLE: - from torch.utils.tensorboard.summary import hparams - else: - from tensorboardX.summary import hparams # type: ignore[no-redef] - exp, ssi, sei = hparams(params, metrics) writer = self.experiment._get_file_writer() writer.add_summary(exp) diff --git a/tests/tests_pytorch/loggers/test_all.py b/tests/tests_pytorch/loggers/test_all.py index 445404acc87bc..536b17533c2ee 100644 --- a/tests/tests_pytorch/loggers/test_all.py +++ b/tests/tests_pytorch/loggers/test_all.py @@ -24,7 +24,7 @@ from pytorch_lightning.demos.boring_classes import BoringModel from pytorch_lightning.loggers import CometLogger, CSVLogger, MLFlowLogger, NeptuneLogger, WandbLogger from pytorch_lightning.loggers.logger import DummyExperiment -from pytorch_lightning.loggers.tensorboard import _TENSORBOARD_AVAILABLE, _TENSORBOARDX_AVAILABLE, TensorBoardLogger +from pytorch_lightning.loggers.tensorboard import _TENSORBOARD_AVAILABLE, TensorBoardLogger from tests_pytorch.helpers.runif import RunIf from tests_pytorch.loggers.test_comet import _patch_comet_atexit from tests_pytorch.loggers.test_mlflow import mock_mlflow_run_creation @@ -296,10 +296,8 @@ def test_logger_with_prefix_all(tmpdir, monkeypatch): # TensorBoard if _TENSORBOARD_AVAILABLE: import torch.utils.tensorboard as tb - elif not _TENSORBOARDX_AVAILABLE: - import tensorboardX as tb else: - pytest.skip("`tensorboard` not installed.") + import tensorboardX as tb monkeypatch.setattr(tb, "SummaryWriter", Mock()) logger = _instantiate_logger(TensorBoardLogger, save_dir=tmpdir, prefix=prefix) @@ -327,10 +325,8 @@ def test_logger_default_name(tmpdir, monkeypatch): # TensorBoard if _TENSORBOARD_AVAILABLE: import torch.utils.tensorboard as tb - elif not _TENSORBOARDX_AVAILABLE: - import tensorboardX as tb else: - pytest.skip("`tensorboard` not installed.") + import tensorboardX as tb monkeypatch.setattr(tb, "SummaryWriter", Mock()) logger = _instantiate_logger(TensorBoardLogger, save_dir=tmpdir) diff --git a/tests/tests_pytorch/loggers/test_tensorboard.py b/tests/tests_pytorch/loggers/test_tensorboard.py index 243cde33ed562..f1986892899be 100644 --- a/tests/tests_pytorch/loggers/test_tensorboard.py +++ b/tests/tests_pytorch/loggers/test_tensorboard.py @@ -24,7 +24,7 @@ from pytorch_lightning import Trainer from pytorch_lightning.demos.boring_classes import BoringModel -from pytorch_lightning.loggers.tensorboard import _TENSORBOARD_AVAILABLE, _TENSORBOARDX_AVAILABLE, TensorBoardLogger +from pytorch_lightning.loggers.tensorboard import _TENSORBOARD_AVAILABLE, TensorBoardLogger from pytorch_lightning.utilities.imports import _OMEGACONF_AVAILABLE from tests_pytorch.helpers.runif import RunIf @@ -282,10 +282,8 @@ def test_tensorboard_finalize(monkeypatch, tmpdir): """Test that the SummaryWriter closes in finalize.""" if _TENSORBOARD_AVAILABLE: import torch.utils.tensorboard as tb - elif not _TENSORBOARDX_AVAILABLE: - import tensorboardX as tb else: - pytest.skip("`tensorboard` not installed.") + import tensorboardX as 
tb monkeypatch.setattr(tb, "SummaryWriter", Mock()) logger = TensorBoardLogger(save_dir=tmpdir) From 14f709decc1740a1219652a907b73866ca66e632 Mon Sep 17 00:00:00 2001 From: Jirka Date: Mon, 21 Nov 2022 18:11:40 +0100 Subject: [PATCH 22/31] simplify for TBX as required pkg --- tests/tests_pytorch/loggers/test_all.py | 38 +++++++++---------- .../tests_pytorch/loggers/test_tensorboard.py | 19 ++++------ 2 files changed, 24 insertions(+), 33 deletions(-) diff --git a/tests/tests_pytorch/loggers/test_all.py b/tests/tests_pytorch/loggers/test_all.py index 536b17533c2ee..4244e984553ff 100644 --- a/tests/tests_pytorch/loggers/test_all.py +++ b/tests/tests_pytorch/loggers/test_all.py @@ -15,16 +15,22 @@ import inspect import pickle from unittest import mock -from unittest.mock import ANY, Mock +from unittest.mock import ANY import pytest import torch from pytorch_lightning import Callback, Trainer from pytorch_lightning.demos.boring_classes import BoringModel -from pytorch_lightning.loggers import CometLogger, CSVLogger, MLFlowLogger, NeptuneLogger, WandbLogger +from pytorch_lightning.loggers import ( + CometLogger, + CSVLogger, + MLFlowLogger, + NeptuneLogger, + TensorBoardLogger, + WandbLogger, +) from pytorch_lightning.loggers.logger import DummyExperiment -from pytorch_lightning.loggers.tensorboard import _TENSORBOARD_AVAILABLE, TensorBoardLogger from tests_pytorch.helpers.runif import RunIf from tests_pytorch.loggers.test_comet import _patch_comet_atexit from tests_pytorch.loggers.test_mlflow import mock_mlflow_run_creation @@ -294,15 +300,10 @@ def test_logger_with_prefix_all(tmpdir, monkeypatch): logger.experiment.__getitem__().log.assert_called_once_with(1.0) # TensorBoard - if _TENSORBOARD_AVAILABLE: - import torch.utils.tensorboard as tb - else: - import tensorboardX as tb - - monkeypatch.setattr(tb, "SummaryWriter", Mock()) - logger = _instantiate_logger(TensorBoardLogger, save_dir=tmpdir, prefix=prefix) - logger.log_metrics({"test": 1.0}, step=0) - logger.experiment.add_scalar.assert_called_once_with("tmp-test", 1.0, 0) + with mock.patch("pytorch_lightning.loggers.tensorboard.SummaryWriter"): + logger = _instantiate_logger(TensorBoardLogger, save_dir=tmpdir, prefix=prefix) + logger.log_metrics({"test": 1.0}, step=0) + logger.experiment.add_scalar.assert_called_once_with("tmp-test", 1.0, 0) # WandB with mock.patch("pytorch_lightning.loggers.wandb.wandb") as wandb, mock.patch( @@ -315,7 +316,7 @@ def test_logger_with_prefix_all(tmpdir, monkeypatch): logger.experiment.log.assert_called_once_with({"tmp-test": 1.0, "trainer/global_step": 0}) -def test_logger_default_name(tmpdir, monkeypatch): +def test_logger_default_name(tmpdir): """Test that the default logger name is lightning_logs.""" # CSV @@ -323,14 +324,9 @@ def test_logger_default_name(tmpdir, monkeypatch): assert logger.name == "lightning_logs" # TensorBoard - if _TENSORBOARD_AVAILABLE: - import torch.utils.tensorboard as tb - else: - import tensorboardX as tb - - monkeypatch.setattr(tb, "SummaryWriter", Mock()) - logger = _instantiate_logger(TensorBoardLogger, save_dir=tmpdir) - assert logger.name == "lightning_logs" + with mock.patch("pytorch_lightning.loggers.tensorboard.SummaryWriter"): + logger = _instantiate_logger(TensorBoardLogger, save_dir=tmpdir) + assert logger.name == "lightning_logs" # MLflow with mock.patch("pytorch_lightning.loggers.mlflow.mlflow"), mock.patch( diff --git a/tests/tests_pytorch/loggers/test_tensorboard.py b/tests/tests_pytorch/loggers/test_tensorboard.py index f1986892899be..ddab738269904 100644 --- 
a/tests/tests_pytorch/loggers/test_tensorboard.py +++ b/tests/tests_pytorch/loggers/test_tensorboard.py @@ -15,7 +15,6 @@ import os from argparse import Namespace from unittest import mock -from unittest.mock import Mock import numpy as np import pytest @@ -24,7 +23,8 @@ from pytorch_lightning import Trainer from pytorch_lightning.demos.boring_classes import BoringModel -from pytorch_lightning.loggers.tensorboard import _TENSORBOARD_AVAILABLE, TensorBoardLogger +from pytorch_lightning.loggers import TensorBoardLogger +from pytorch_lightning.loggers.tensorboard import _TENSORBOARD_AVAILABLE from pytorch_lightning.utilities.imports import _OMEGACONF_AVAILABLE from tests_pytorch.helpers.runif import RunIf @@ -278,28 +278,23 @@ def training_step(self, *args): assert count_steps == model.indexes -def test_tensorboard_finalize(monkeypatch, tmpdir): +@mock.patch("pytorch_lightning.loggers.tensorboard.SummaryWriter") +def test_tensorboard_finalize(summary_writer, tmpdir): """Test that the SummaryWriter closes in finalize.""" - if _TENSORBOARD_AVAILABLE: - import torch.utils.tensorboard as tb - else: - import tensorboardX as tb - - monkeypatch.setattr(tb, "SummaryWriter", Mock()) logger = TensorBoardLogger(save_dir=tmpdir) assert logger._experiment is None logger.finalize("any") # no log calls, no experiment created -> nothing to flush - logger.experiment.assert_not_called() + summary_writer.assert_not_called() logger = TensorBoardLogger(save_dir=tmpdir) logger.log_metrics({"flush_me": 11.1}) # trigger creation of an experiment logger.finalize("any") # finalize flushes to experiment directory - logger.experiment.flush.assert_called() - logger.experiment.close.assert_called() + summary_writer().flush.assert_called() + summary_writer().close.assert_called() def test_tensorboard_save_hparams_to_yaml_once(tmpdir): From b1f04d7a585ac9996899b0da315705593280cb8c Mon Sep 17 00:00:00 2001 From: Jirka Date: Mon, 21 Nov 2022 18:31:49 +0100 Subject: [PATCH 23/31] docs example --- src/pytorch_lightning/loggers/tensorboard.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index c6e385a67abf3..925a6ea25a04c 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -19,6 +19,7 @@ import logging import os from argparse import Namespace + from typing import Any, Dict, Mapping, Optional, Union import numpy as np @@ -85,6 +86,15 @@ class TensorBoardLogger(Logger): of the queue for pending logs before flushing. `flush_secs` determines how many seconds elapses before flushing. 
+ Example: + >>> import shutil, tempfile + >>> tmp = tempfile.mkdtemp() + >>> tbl = TensorBoardLogger(tmp) + >>> tbl.log_hyperparams({"epochs": 5, "optimizer": "Adam"}) + >>> tbl.log_metrics({"acc": 0.75}) + >>> tbl.log_metrics({"acc": 0.9}) + >>> tbl.finalize("success") + >>> shutil.rmtree(tmp) """ NAME_HPARAMS_FILE = "hparams.yaml" LOGGER_JOIN_CHAR = "-" From 65c4798918a8327dccd35b3bfc199fa988c6dbae Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 21 Nov 2022 17:33:23 +0000 Subject: [PATCH 24/31] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/pytorch_lightning/loggers/tensorboard.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index 925a6ea25a04c..51afd329a60bd 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -19,7 +19,6 @@ import logging import os from argparse import Namespace - from typing import Any, Dict, Mapping, Optional, Union import numpy as np From d84184d090d1743bf696c9a598f076b21fbf9512 Mon Sep 17 00:00:00 2001 From: Jirka Date: Mon, 21 Nov 2022 18:33:45 +0100 Subject: [PATCH 25/31] ... --- src/pytorch_lightning/loggers/tensorboard.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pytorch_lightning/loggers/tensorboard.py b/src/pytorch_lightning/loggers/tensorboard.py index 925a6ea25a04c..d51e47a991bed 100644 --- a/src/pytorch_lightning/loggers/tensorboard.py +++ b/src/pytorch_lightning/loggers/tensorboard.py @@ -116,9 +116,9 @@ def __init__( self._name = name or "" self._version = version self._sub_dir = None if sub_dir is None else os.fspath(sub_dir) - self._log_graph = log_graph and _TENSORBOARD_AVAILABLE if log_graph and not _TENSORBOARD_AVAILABLE: rank_zero_warn("You set `TensorBoardLogger(log_graph=True)` but `tensorboard` is not available.") + self._log_graph = log_graph and _TENSORBOARD_AVAILABLE self._default_hp_metric = default_hp_metric self._prefix = prefix From 0683ca362d933915839b5a1219cfcd7085ade006 Mon Sep 17 00:00:00 2001 From: Jirka Date: Mon, 21 Nov 2022 18:44:40 +0100 Subject: [PATCH 26/31] tests --- tests/tests_pytorch/test_cli.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/tests_pytorch/test_cli.py b/tests/tests_pytorch/test_cli.py index 79562e52e3fea..5e864cea3568d 100644 --- a/tests/tests_pytorch/test_cli.py +++ b/tests/tests_pytorch/test_cli.py @@ -1330,9 +1330,7 @@ def test_tensorboard_logger_init_args(): "TensorBoardLogger", { "save_dir": "tb", # Resolve from TensorBoardLogger.__init__ - }, - { - "comment": "tb", # Unsupported resolving from local imports + "comment": "tb", # Resolve from tensorboard.writer.SummaryWriter.__init__ }, ) From 0488a6c6c84771c095b6aabb18f0e7f0aa24320e Mon Sep 17 00:00:00 2001 From: Jirka Date: Mon, 21 Nov 2022 19:06:55 +0100 Subject: [PATCH 27/31] chlog --- src/pytorch_lightning/CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/pytorch_lightning/CHANGELOG.md b/src/pytorch_lightning/CHANGELOG.md index 4f85b5223fcd5..ea165fe0a8234 100644 --- a/src/pytorch_lightning/CHANGELOG.md +++ b/src/pytorch_lightning/CHANGELOG.md @@ -32,6 +32,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). 
- Drop PyTorch 1.9 support ([#15347](https://github.com/Lightning-AI/lightning/pull/15347)) +- Switch from `tensorboard` to `tensorboardx` in `TensorBoardLogger` ([#15728](https://github.com/Lightning-AI/lightning/pull/15728)) + + - From now on, Lightning Trainer and `LightningModule.load_from_checkpoint` automatically upgrade the loaded checkpoint if it was produced in an old version of Lightning ([#15237](https://github.com/Lightning-AI/lightning/pull/15237)) From 61bdeafa01af0c32a44e8d6e8b0af297f1eb68d0 Mon Sep 17 00:00:00 2001 From: Jirka Date: Mon, 21 Nov 2022 19:27:17 +0100 Subject: [PATCH 28/31] reqs --- requirements/pytorch/extra.txt | 1 - requirements/pytorch/test.txt | 2 ++ 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements/pytorch/extra.txt b/requirements/pytorch/extra.txt index 471f0aafbd50b..3eb221d020230 100644 --- a/requirements/pytorch/extra.txt +++ b/requirements/pytorch/extra.txt @@ -7,4 +7,3 @@ omegaconf>=2.0.5, <2.3.0 hydra-core>=1.0.5, <1.3.0 jsonargparse[signatures]>=4.15.2, <4.16.0 rich>=10.14.0, !=10.15.0.a, <13.0.0 -protobuf<=3.20.1 # strict # an extra is updating protobuf, this pin prevents TensorBoard failure diff --git a/requirements/pytorch/test.txt b/requirements/pytorch/test.txt index c7308ae312b5a..d1c00cc07cbe2 100644 --- a/requirements/pytorch/test.txt +++ b/requirements/pytorch/test.txt @@ -14,4 +14,6 @@ psutil<5.9.4 # for `DeviceStatsMonitor` pandas>1.0, <1.5.2 # needed in benchmarks fastapi<0.87.0 uvicorn<0.19.1 + tensorboard>=2.9.1, <2.12.0 +protobuf<=3.20.1 # strict # an extra is updating protobuf, this pin prevents TensorBoard failure \ No newline at end of file From d3dc7ee86b18bf91910deacd07ffc2ead6cf6d62 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 21 Nov 2022 18:28:48 +0000 Subject: [PATCH 29/31] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- requirements/pytorch/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/pytorch/test.txt b/requirements/pytorch/test.txt index d1c00cc07cbe2..d27e3677690a5 100644 --- a/requirements/pytorch/test.txt +++ b/requirements/pytorch/test.txt @@ -16,4 +16,4 @@ fastapi<0.87.0 uvicorn<0.19.1 tensorboard>=2.9.1, <2.12.0 -protobuf<=3.20.1 # strict # an extra is updating protobuf, this pin prevents TensorBoard failure \ No newline at end of file +protobuf<=3.20.1 # strict # an extra is updating protobuf, this pin prevents TensorBoard failure From 317db177abd9d6e4db0b4d2fddd1707fbd954e24 Mon Sep 17 00:00:00 2001 From: Jirka Date: Mon, 21 Nov 2022 20:30:03 +0100 Subject: [PATCH 30/31] tbx 2.4 --- requirements/pytorch/base.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/pytorch/base.txt b/requirements/pytorch/base.txt index 4e1ad6d4f942b..abd317519a57c 100644 --- a/requirements/pytorch/base.txt +++ b/requirements/pytorch/base.txt @@ -6,7 +6,7 @@ torch>=1.10.*, <=1.13.0 tqdm>=4.57.0, <4.65.0 PyYAML>=5.4, <=6.0 fsspec[http]>2021.06.0, <2022.8.0 -tensorboardX>=2.0, <=2.5.1 +tensorboardX>=2.4, <=2.5.1 torchmetrics>=0.7.0, <0.10.1 # needed for using fixed compare_version packaging>=17.0, <=21.3 typing-extensions>=4.0.0, <=4.4.0 From 968487b52958d99e855593114630beb65e0a3200 Mon Sep 17 00:00:00 2001 From: Jirka Date: Mon, 21 Nov 2022 20:37:16 +0100 Subject: [PATCH 31/31] tbx 2.2 --- requirements/pytorch/base.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/pytorch/base.txt 
b/requirements/pytorch/base.txt index abd317519a57c..6808fabb8e403 100644 --- a/requirements/pytorch/base.txt +++ b/requirements/pytorch/base.txt @@ -6,7 +6,7 @@ torch>=1.10.*, <=1.13.0 tqdm>=4.57.0, <4.65.0 PyYAML>=5.4, <=6.0 fsspec[http]>2021.06.0, <2022.8.0 -tensorboardX>=2.4, <=2.5.1 +tensorboardX>=2.2, <=2.5.1 # min version is set by torch.onnx missing attribute torchmetrics>=0.7.0, <0.10.1 # needed for using fixed compare_version packaging>=17.0, <=21.3 typing-extensions>=4.0.0, <=4.4.0
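
For reference, a minimal usage sketch of TensorBoardLogger as it stands at the end of this series: the logger writes through the tensorboardX SummaryWriter, and the optional `tensorboard` package is only consulted when `log_graph=True` (without it the logger emits a warning and skips graph logging). This is a sketch under assumptions, not part of the patches themselves: it assumes a Lightning install carrying these changes, uses Lightning's BoringModel demo, and the save directory and experiment name are placeholders.

    from pytorch_lightning import Trainer
    from pytorch_lightning.demos.boring_classes import BoringModel
    from pytorch_lightning.loggers import TensorBoardLogger

    # Metrics and hparams are written to ./demo_logs/tbx_demo/version_<n> via tensorboardX.
    # Setting log_graph=True would only take effect if `tensorboard` is also installed;
    # otherwise the logger warns and disables graph logging.
    logger = TensorBoardLogger(save_dir="demo_logs", name="tbx_demo", log_graph=False)

    trainer = Trainer(logger=logger, max_epochs=1, limit_train_batches=2, limit_val_batches=2)
    trainer.fit(BoringModel())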