From 5861aeb14f4a4bdc1625842d5f116f3c040cba2f Mon Sep 17 00:00:00 2001
From: Jerome
Date: Thu, 28 Apr 2022 13:02:51 +0300
Subject: [PATCH 1/2] Invoke parent DDP configuration for torch > 1.10.2

Signed-off-by: Jerome
---
 pytorch_lightning/strategies/hpu_parallel.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytorch_lightning/strategies/hpu_parallel.py b/pytorch_lightning/strategies/hpu_parallel.py
index 562a841b89510..4996ddabcf960 100644
--- a/pytorch_lightning/strategies/hpu_parallel.py
+++ b/pytorch_lightning/strategies/hpu_parallel.py
@@ -103,7 +103,7 @@ def configure_ddp(self) -> None:
                 self._model._set_static_graph()  # type: ignore
             self._register_ddp_hooks()
         else:
-            self.configure_ddp()
+            super().configure_ddp()
 
     def broadcast(self, obj: object, src: int = 0) -> object:  # type: ignore
         obj = [obj]

From 172cd23baf174b264cc9f9bfda96a80c2e354bdb Mon Sep 17 00:00:00 2001
From: Jerome
Date: Thu, 28 Apr 2022 13:18:39 +0300
Subject: [PATCH 2/2] Update changelog

Signed-off-by: Jerome
---
 CHANGELOG.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9c4d8421ee95f..1f60ff7f13bf7 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -147,7 +147,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Fixed an issue to ensure all the checkpoint states are saved in a common filepath with `DeepspeedStrategy` ([#12887](https://github.com/PyTorchLightning/pytorch-lightning/pull/12887))
 
 
--
+- Fixed recursive invocation of the DDP configuration in the HPU parallel plugin ([#12912](https://github.com/PyTorchLightning/pytorch-lightning/pull/12912))
 
 
 -
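
Note on the fix in PATCH 1/2: inside the overriding `configure_ddp`, calling `self.configure_ddp()` dispatches straight back into the same method and recurses until a `RecursionError`; `super().configure_ddp()` instead delegates to the parent `DDPStrategy` implementation. Below is a minimal, self-contained Python sketch of the pattern. The class and method names mirror the real ones, but the bodies and the `torch_lesser_equal_1_10_2` flag are illustrative stand-ins, not the actual Lightning code.

class DDPStrategy:
    def configure_ddp(self) -> None:
        # Stand-in for the parent behavior: wrap the model for DDP.
        print("DDPStrategy.configure_ddp: wrapping model for DDP")


class HPUParallelStrategy(DDPStrategy):
    def __init__(self, torch_lesser_equal_1_10_2: bool) -> None:
        self._old_torch = torch_lesser_equal_1_10_2

    def configure_ddp(self) -> None:
        if self._old_torch:
            # torch <= 1.10.2: DDP rejects the static_graph argument, so the
            # strategy configures DDP itself (details elided in this sketch).
            print("HPUParallelStrategy.configure_ddp: static-graph path")
        else:
            # The bug: `self.configure_ddp()` re-enters this very method and
            # recurses until RecursionError. The fix delegates upward:
            super().configure_ddp()


HPUParallelStrategy(torch_lesser_equal_1_10_2=False).configure_ddp()
# prints: DDPStrategy.configure_ddp: wrapping model for DDP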