From e38b4aed769169d9403f7b71ffc359d1ee26774d Mon Sep 17 00:00:00 2001
From: Carlos Mocholí
Date: Tue, 8 Jun 2021 03:00:47 +0200
Subject: [PATCH 1/3] Only track dev debugger events if enabled

---
 pytorch_lightning/utilities/debugging.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pytorch_lightning/utilities/debugging.py b/pytorch_lightning/utilities/debugging.py
index 3a3afd2b36329..b4388bf89c195 100644
--- a/pytorch_lightning/utilities/debugging.py
+++ b/pytorch_lightning/utilities/debugging.py
@@ -51,6 +51,7 @@ def __init__(self, trainer):
         self.test_dataloader_calls = []
         self.dataloader_sequence_calls = []
 
+    @enabled_only
     def track_event(
         self,
         evt_type: str,

From 22f70cfd2ecdbc4a0e82fc3d48bc18dc23a059e8 Mon Sep 17 00:00:00 2001
From: Carlos Mocholi
Date: Tue, 8 Jun 2021 11:21:49 +0200
Subject: [PATCH 2/3] Update CHANGELOG

---
 CHANGELOG.md | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7b2d79ba0fa4a..bd7a29a49c4c6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -184,6 +184,10 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Fixed global step update when the epoch is skipped ([#7677](https://github.com/PyTorchLightning/pytorch-lightning/pull/7677))
 
 
+- Fixed dev debugger memory growing due to tracking events even when disabled ([#7875](https://github.com/PyTorchLightning/pytorch-lightning/pull/7875))
+
+
+
 - Fixed training loop total batch counter when accumulate grad batches was enabled ([#7692](https://github.com/PyTorchLightning/pytorch-lightning/pull/7692))
 
 

From 0b4e72df748030925796c677ae986a7fe780387e Mon Sep 17 00:00:00 2001
From: Carlos Mocholi
Date: Tue, 8 Jun 2021 11:22:27 +0200
Subject: [PATCH 3/3] whitespace

---
 CHANGELOG.md | 1 -
 1 file changed, 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index bd7a29a49c4c6..87b7f6bb51b5e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -187,7 +187,6 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Fixed dev debugger memory growing due to tracking events even when disabled ([#7875](https://github.com/PyTorchLightning/pytorch-lightning/pull/7875))
 
 
-
 - Fixed training loop total batch counter when accumulate grad batches was enabled ([#7692](https://github.com/PyTorchLightning/pytorch-lightning/pull/7692))
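
Note: the `enabled_only` decorator applied in the first patch is defined elsewhere in `pytorch_lightning/utilities/debugging.py` and is not part of this diff. Below is a minimal sketch of the guard pattern it implements, assuming the decorated method lives on an object exposing a boolean `enabled` attribute; the `InternalDebugger` class here is a simplified stand-in for the real dev debugger, not its actual implementation.

```python
from functools import wraps
from typing import Any, Callable


def enabled_only(fn: Callable) -> Callable:
    """Run the wrapped method only when the owning instance is enabled.

    Sketch: assumes ``self`` has a boolean ``enabled`` attribute.
    """

    @wraps(fn)
    def wrapped_fn(self: Any, *args: Any, **kwargs: Any) -> None:
        if self.enabled:
            fn(self, *args, **kwargs)

    return wrapped_fn


class InternalDebugger:
    """Toy stand-in for the dev debugger, to show the effect of the guard."""

    def __init__(self, enabled: bool = False) -> None:
        self.enabled = enabled
        self.events = []

    @enabled_only
    def track_event(self, evt_type: str) -> None:
        # With the decorator in place, this append never runs while
        # ``enabled`` is False, so the list cannot grow unboundedly.
        self.events.append(evt_type)


if __name__ == "__main__":
    debugger = InternalDebugger(enabled=False)
    for _ in range(1000):
        debugger.track_event("batch")
    assert debugger.events == []  # nothing tracked while disabled
```

Guarding at the decorator level keeps the disabled path down to a single attribute check and, crucially for the memory issue this PR fixes, skips the list appends entirely, so a disabled debugger no longer accumulates events over a long training run.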