From af21c52d32b3fcd5a4e2c82dda6bc8ddd0afa966 Mon Sep 17 00:00:00 2001 From: Gautier Dagan Date: Fri, 1 Jul 2022 10:07:09 +0100 Subject: [PATCH 01/14] fix mypy errors for loggers/wandb.py remove extra assert --- pyproject.toml | 1 - src/pytorch_lightning/loggers/logger.py | 4 +-- src/pytorch_lightning/loggers/wandb.py | 36 ++++++++++++------------- 3 files changed, 20 insertions(+), 21 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index dc9db77d6dabd..fc696d0cd6b6e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,7 +61,6 @@ module = [ "pytorch_lightning.loggers.mlflow", "pytorch_lightning.loggers.neptune", "pytorch_lightning.loggers.tensorboard", - "pytorch_lightning.loggers.wandb", "pytorch_lightning.loops.epoch.training_epoch_loop", "pytorch_lightning.strategies.ddp", "pytorch_lightning.strategies.ddp2", diff --git a/src/pytorch_lightning/loggers/logger.py b/src/pytorch_lightning/loggers/logger.py index d532aae413650..bd2ae372ce343 100644 --- a/src/pytorch_lightning/loggers/logger.py +++ b/src/pytorch_lightning/loggers/logger.py @@ -190,12 +190,12 @@ def group_separator(self): @property @abstractmethod - def name(self) -> str: + def name(self) -> Optional[str]: """Return the experiment name.""" @property @abstractmethod - def version(self) -> Union[int, str]: + def version(self) -> Optional[Union[int, str]]: """Return the experiment version.""" diff --git a/src/pytorch_lightning/loggers/wandb.py b/src/pytorch_lightning/loggers/wandb.py index 88439cd9435db..9dc85f030a8cb 100644 --- a/src/pytorch_lightning/loggers/wandb.py +++ b/src/pytorch_lightning/loggers/wandb.py @@ -32,10 +32,11 @@ try: import wandb + from wandb.sdk.lib import RunDisabled from wandb.wandb_run import Run except ModuleNotFoundError: # needed for test mocks, these tests shall be updated - wandb, Run = None, None + wandb, Run, RunDisabled = None, None, None # type: ignore class WandbLogger(Logger): @@ -251,17 +252,17 @@ def __init__( self, name: Optional[str] = None, save_dir: Optional[str] = None, - offline: Optional[bool] = False, + offline: bool = False, id: Optional[str] = None, anonymous: Optional[bool] = None, version: Optional[str] = None, project: Optional[str] = None, log_model: Union[str, bool] = False, - experiment=None, - prefix: Optional[str] = "", + experiment: Union[Run, RunDisabled, None] = None, + prefix: str = "", agg_key_funcs: Optional[Mapping[str, Callable[[Sequence[float]], float]]] = None, agg_default_func: Optional[Callable[[Sequence[float]], float]] = None, - **kwargs, + **kwargs: Any, ): if wandb is None: raise ModuleNotFoundError( @@ -288,17 +289,16 @@ def __init__( self._log_model = log_model self._prefix = prefix self._experiment = experiment - self._logged_model_time = {} - self._checkpoint_callback = None + self._logged_model_time: Dict[str, float] = {} + self._checkpoint_callback: Optional["ReferenceType[Checkpoint]"] = None # set wandb init arguments - anonymous_lut = {True: "allow", False: None} - self._wandb_init = dict( + self._wandb_init: Dict[str, Any] = dict( name=name or project, project=project, id=version or id, dir=save_dir, resume="allow", - anonymous=anonymous_lut.get(anonymous, anonymous), + anonymous="allow" if anonymous else None, ) self._wandb_init.update(**kwargs) # extract parameters @@ -310,7 +310,7 @@ def __init__( wandb.require("service") _ = self.experiment - def __getstate__(self): + def __getstate__(self) -> Dict[str, Any]: state = self.__dict__.copy() # args needed to reload correct experiment if self._experiment is not None: @@ -322,7 +322,7 @@ 
def __getstate__(self): state["_experiment"] = None return state - @property + @property # type: ignore @rank_zero_experiment def experiment(self) -> Run: r""" @@ -357,13 +357,14 @@ def experiment(self) -> Run: self._experiment = wandb.init(**self._wandb_init) # define default x-axis - if getattr(self._experiment, "define_metric", None): + if isinstance(self._experiment, Run) and getattr(self._experiment, "define_metric", None): self._experiment.define_metric("trainer/global_step") self._experiment.define_metric("*", step_metric="trainer/global_step", step_sync=True) + assert isinstance(self._experiment, Run) return self._experiment - def watch(self, model: nn.Module, log: str = "gradients", log_freq: int = 100, log_graph: bool = True): + def watch(self, model: nn.Module, log: str = "gradients", log_freq: int = 100, log_graph: bool = True) -> None: self.experiment.watch(model, log=log, log_freq=log_freq, log_graph=log_graph) @rank_zero_only @@ -379,9 +380,8 @@ def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) -> metrics = _add_prefix(metrics, self._prefix, self.LOGGER_JOIN_CHAR) if step is not None: - self.experiment.log({**metrics, "trainer/global_step": step}) - else: - self.experiment.log(metrics) + metrics["trainer/global_step"] = step + self.experiment.log(metrics) @rank_zero_only def log_table( @@ -417,7 +417,7 @@ def log_text( self.log_table(key, columns, data, dataframe, step) @rank_zero_only - def log_image(self, key: str, images: List[Any], step: Optional[int] = None, **kwargs: str) -> None: + def log_image(self, key: str, images: List[Any], step: Optional[int] = None, **kwargs: Any) -> None: """Log images (tensors, numpy arrays, PIL Images or file paths). Optional kwargs are lists passed to each image (ex: caption, masks, boxes). 
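[Note on PATCH 01/14] The typing pattern this patch leans on is twofold: the optional import falls back to None so the module stays importable (and mockable in tests) without wandb, and isinstance narrowing plus the final assert let `experiment` keep its `-> Run` return type even though `wandb.init()` is annotated as possibly returning a disabled run or None. Below is a minimal, self-contained sketch of the same pattern, assuming wandb is installed; `DemoLogger` is illustrative and not part of Lightning:

    from typing import Union

    try:
        import wandb
        from wandb.sdk.lib import RunDisabled
        from wandb.wandb_run import Run
    except ModuleNotFoundError:
        # The None fallbacks keep this module importable without wandb;
        # rebinding the names is what needs the `# type: ignore`.
        wandb, Run, RunDisabled = None, None, None  # type: ignore


    class DemoLogger:
        def __init__(self, experiment: Union[Run, RunDisabled, None] = None) -> None:
            self._experiment = experiment

        @property
        def experiment(self) -> Run:
            if self._experiment is None:
                # wandb.init() may hand back a disabled run, so the attribute
                # cannot be treated as a Run yet.
                self._experiment = wandb.init(resume="allow")
            if isinstance(self._experiment, Run) and getattr(self._experiment, "define_metric", None):
                # define_metric exists only on real runs and only in newer
                # wandb releases, hence both guards before calling it.
                self._experiment.define_metric("trainer/global_step")
            # The assert narrows the union to Run, matching the signature.
            assert isinstance(self._experiment, Run)
            return self._experiment

With this shape mypy accepts the property, while tests remain free to patch both `wandb` and `Run`, which is exactly what the test changes in the later patches of this series do.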
From 6cd9fb0ffaeab5916a647124bd3298dc3d23cc14 Mon Sep 17 00:00:00 2001 From: Gautier Dagan Date: Fri, 1 Jul 2022 11:42:33 +0100 Subject: [PATCH 02/14] fix metric test fail --- src/pytorch_lightning/loggers/wandb.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/pytorch_lightning/loggers/wandb.py b/src/pytorch_lightning/loggers/wandb.py index 9dc85f030a8cb..debb663a9087e 100644 --- a/src/pytorch_lightning/loggers/wandb.py +++ b/src/pytorch_lightning/loggers/wandb.py @@ -380,8 +380,9 @@ def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) -> metrics = _add_prefix(metrics, self._prefix, self.LOGGER_JOIN_CHAR) if step is not None: - metrics["trainer/global_step"] = step - self.experiment.log(metrics) + self.experiment.log(dict(metrics, **{"trainer/global_step": step})) + else: + self.experiment.log(metrics) @rank_zero_only def log_table( From 0f7a5e8d9b35ec05a18569ac26c4652504d53b44 Mon Sep 17 00:00:00 2001 From: Gautier Dagan Date: Fri, 1 Jul 2022 12:37:00 +0100 Subject: [PATCH 03/14] fix wandb logger tests mocking Run class --- tests/tests_pytorch/loggers/test_all.py | 11 +++--- tests/tests_pytorch/loggers/test_wandb.py | 42 ++++++++++++----------- 2 files changed, 28 insertions(+), 25 deletions(-) diff --git a/tests/tests_pytorch/loggers/test_all.py b/tests/tests_pytorch/loggers/test_all.py index 96d1016cc612b..b96481c3e4156 100644 --- a/tests/tests_pytorch/loggers/test_all.py +++ b/tests/tests_pytorch/loggers/test_all.py @@ -364,11 +364,12 @@ def test_logger_with_prefix_all(tmpdir, monkeypatch): # WandB with mock.patch("pytorch_lightning.loggers.wandb.wandb") as wandb: - logger = _instantiate_logger(WandbLogger, save_dir=tmpdir, prefix=prefix) - wandb.run = None - wandb.init().step = 0 - logger.log_metrics({"test": 1.0}, step=0) - logger.experiment.log.assert_called_once_with({"tmp-test": 1.0, "trainer/global_step": 0}) + with mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock): + logger = _instantiate_logger(WandbLogger, save_dir=tmpdir, prefix=prefix) + wandb.run = None + wandb.init().step = 0 + logger.log_metrics({"test": 1.0}, step=0) + logger.experiment.log.assert_called_once_with({"tmp-test": 1.0, "trainer/global_step": 0}) def test_logger_default_name(tmpdir): diff --git a/tests/tests_pytorch/loggers/test_wandb.py b/tests/tests_pytorch/loggers/test_wandb.py index f62ebff9e719a..cb757bdd1a431 100644 --- a/tests/tests_pytorch/loggers/test_wandb.py +++ b/tests/tests_pytorch/loggers/test_wandb.py @@ -23,7 +23,7 @@ from pytorch_lightning.utilities.exceptions import MisconfigurationException from tests_pytorch.helpers.utils import no_warning_call - +@mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock) @mock.patch("pytorch_lightning.loggers.wandb.wandb") def test_wandb_logger_init(wandb, monkeypatch): """Verify that basic functionality of wandb logger works. 
@@ -111,33 +111,35 @@ class Experiment: def name(self): return "the_run_name" - wandb.run = None - wandb.init.return_value = Experiment() - logger = WandbLogger(id="the_id", offline=True) + with mock.patch("pytorch_lightning.loggers.wandb.Run", new=Experiment): - trainer = Trainer(default_root_dir=tmpdir, max_epochs=1, logger=logger) - # Access the experiment to ensure it's created - assert trainer.logger.experiment, "missing experiment" - assert trainer.log_dir == logger.save_dir - pkl_bytes = pickle.dumps(trainer) - trainer2 = pickle.loads(pkl_bytes) + wandb.run = None + wandb.init.return_value = Experiment() + logger = WandbLogger(id="the_id", offline=True) - assert os.environ["WANDB_MODE"] == "dryrun" - assert trainer2.logger.__class__.__name__ == WandbLogger.__name__ - assert trainer2.logger.experiment, "missing experiment" + trainer = Trainer(default_root_dir=tmpdir, max_epochs=1, logger=logger) + # Access the experiment to ensure it's created + assert trainer.logger.experiment, "missing experiment" + assert trainer.log_dir == logger.save_dir + pkl_bytes = pickle.dumps(trainer) + trainer2 = pickle.loads(pkl_bytes) - wandb.init.assert_called() - assert "id" in wandb.init.call_args[1] - assert wandb.init.call_args[1]["id"] == "the_id" + assert os.environ["WANDB_MODE"] == "dryrun" + assert trainer2.logger.__class__.__name__ == WandbLogger.__name__ + assert trainer2.logger.experiment, "missing experiment" - del os.environ["WANDB_MODE"] + wandb.init.assert_called() + assert "id" in wandb.init.call_args[1] + assert wandb.init.call_args[1]["id"] == "the_id" + del os.environ["WANDB_MODE"] +@mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock) @mock.patch("pytorch_lightning.loggers.wandb.wandb") def test_wandb_logger_dirs_creation(wandb, monkeypatch, tmpdir): """Test that the logger creates the folders and files in the right place.""" import pytorch_lightning.loggers.wandb as imports - + monkeypatch.setattr(imports, "_WANDB_GREATER_EQUAL_0_12_10", True) wandb.run = None logger = WandbLogger(save_dir=str(tmpdir), offline=True) @@ -168,7 +170,7 @@ def test_wandb_logger_dirs_creation(wandb, monkeypatch, tmpdir): assert set(os.listdir(trainer.checkpoint_callback.dirpath)) == {"epoch=0-step=3.ckpt"} assert trainer.log_dir == logger.save_dir - +@mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock) @mock.patch("pytorch_lightning.loggers.wandb.wandb") def test_wandb_log_model(wandb, monkeypatch, tmpdir): """Test that the logger creates the folders and files in the right place.""" @@ -233,7 +235,7 @@ def test_wandb_log_model(wandb, monkeypatch, tmpdir): }, ) - +@mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock) @mock.patch("pytorch_lightning.loggers.wandb.wandb") def test_wandb_log_media(wandb, tmpdir): """Test that the logger creates the folders and files in the right place.""" From bb64c58dffa382e1aef0ec4d5513baaaccbe6a77 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 1 Jul 2022 11:38:57 +0000 Subject: [PATCH 04/14] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- tests/tests_pytorch/loggers/test_wandb.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/tests/tests_pytorch/loggers/test_wandb.py b/tests/tests_pytorch/loggers/test_wandb.py index cb757bdd1a431..c19b2b1146739 100644 --- a/tests/tests_pytorch/loggers/test_wandb.py +++ b/tests/tests_pytorch/loggers/test_wandb.py @@ -23,7 +23,8 @@ from 
pytorch_lightning.utilities.exceptions import MisconfigurationException from tests_pytorch.helpers.utils import no_warning_call -@mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock) + +@mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock) @mock.patch("pytorch_lightning.loggers.wandb.wandb") def test_wandb_logger_init(wandb, monkeypatch): """Verify that basic functionality of wandb logger works. @@ -134,12 +135,13 @@ def name(self): del os.environ["WANDB_MODE"] -@mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock) + +@mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock) @mock.patch("pytorch_lightning.loggers.wandb.wandb") def test_wandb_logger_dirs_creation(wandb, monkeypatch, tmpdir): """Test that the logger creates the folders and files in the right place.""" import pytorch_lightning.loggers.wandb as imports - + monkeypatch.setattr(imports, "_WANDB_GREATER_EQUAL_0_12_10", True) wandb.run = None logger = WandbLogger(save_dir=str(tmpdir), offline=True) @@ -170,7 +172,8 @@ def test_wandb_logger_dirs_creation(wandb, monkeypatch, tmpdir): assert set(os.listdir(trainer.checkpoint_callback.dirpath)) == {"epoch=0-step=3.ckpt"} assert trainer.log_dir == logger.save_dir -@mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock) + +@mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock) @mock.patch("pytorch_lightning.loggers.wandb.wandb") def test_wandb_log_model(wandb, monkeypatch, tmpdir): """Test that the logger creates the folders and files in the right place.""" @@ -235,7 +238,8 @@ def test_wandb_log_model(wandb, monkeypatch, tmpdir): }, ) -@mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock) + +@mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock) @mock.patch("pytorch_lightning.loggers.wandb.wandb") def test_wandb_log_media(wandb, tmpdir): """Test that the logger creates the folders and files in the right place.""" From 98e8bf5fbc021dbdb54f850ae4cd8eccb29ab430 Mon Sep 17 00:00:00 2001 From: Gautier Dagan Date: Fri, 1 Jul 2022 12:50:18 +0100 Subject: [PATCH 05/14] fix final test_all_loggers test --- tests/tests_pytorch/loggers/test_all.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/tests_pytorch/loggers/test_all.py b/tests/tests_pytorch/loggers/test_all.py index b96481c3e4156..ed15c37d8242f 100644 --- a/tests/tests_pytorch/loggers/test_all.py +++ b/tests/tests_pytorch/loggers/test_all.py @@ -45,6 +45,7 @@ mock.patch("pytorch_lightning.loggers.mlflow.MlflowClient"), mock.patch("pytorch_lightning.loggers.neptune.neptune", new_callable=create_neptune_mock), mock.patch("pytorch_lightning.loggers.wandb.wandb"), + mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock), ) ALL_LOGGER_CLASSES = ( CometLogger, From 762e141b0d0595af8c1ab2b17cc597044cbe9d50 Mon Sep 17 00:00:00 2001 From: Gautier Dagan Date: Fri, 1 Jul 2022 15:35:56 +0100 Subject: [PATCH 06/14] fix: remove typing that causes cli test to fail and add type ignore --- src/pytorch_lightning/loggers/wandb.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pytorch_lightning/loggers/wandb.py b/src/pytorch_lightning/loggers/wandb.py index debb663a9087e..ba02c570e197f 100644 --- a/src/pytorch_lightning/loggers/wandb.py +++ b/src/pytorch_lightning/loggers/wandb.py @@ -292,7 +292,7 @@ def __init__( self._logged_model_time: Dict[str, float] = {} self._checkpoint_callback: Optional["ReferenceType[Checkpoint]"] = None # set wandb init arguments - self._wandb_init: Dict[str, Any] = dict( + self._wandb_init = dict( 
name=name or project, project=project, id=version or id, @@ -354,7 +354,7 @@ def experiment(self) -> Run: self._experiment = wandb._attach(attach_id) else: # create new wandb process - self._experiment = wandb.init(**self._wandb_init) + self._experiment = wandb.init(**self._wandb_init) # type: ignore # define default x-axis if isinstance(self._experiment, Run) and getattr(self._experiment, "define_metric", None): From 459233b494c26f71c48fd784bc5ae30113448bef Mon Sep 17 00:00:00 2001 From: Carlos Mocholí Date: Fri, 1 Jul 2022 17:03:42 +0200 Subject: [PATCH 07/14] Minor test simplification --- tests/tests_pytorch/loggers/test_all.py | 13 ++++++------- tests/tests_pytorch/loggers/test_wandb.py | 9 ++++----- 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/tests/tests_pytorch/loggers/test_all.py b/tests/tests_pytorch/loggers/test_all.py index ed15c37d8242f..94923dd0faf6f 100644 --- a/tests/tests_pytorch/loggers/test_all.py +++ b/tests/tests_pytorch/loggers/test_all.py @@ -364,13 +364,12 @@ def test_logger_with_prefix_all(tmpdir, monkeypatch): logger.experiment.add_scalar.assert_called_once_with("tmp-test", 1.0, 0) # WandB - with mock.patch("pytorch_lightning.loggers.wandb.wandb") as wandb: - with mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock): - logger = _instantiate_logger(WandbLogger, save_dir=tmpdir, prefix=prefix) - wandb.run = None - wandb.init().step = 0 - logger.log_metrics({"test": 1.0}, step=0) - logger.experiment.log.assert_called_once_with({"tmp-test": 1.0, "trainer/global_step": 0}) + with mock.patch("pytorch_lightning.loggers.wandb.wandb") as wandb, mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock): + logger = _instantiate_logger(WandbLogger, save_dir=tmpdir, prefix=prefix) + wandb.run = None + wandb.init().step = 0 + logger.log_metrics({"test": 1.0}, step=0) + logger.experiment.log.assert_called_once_with({"tmp-test": 1.0, "trainer/global_step": 0}) def test_logger_default_name(tmpdir): diff --git a/tests/tests_pytorch/loggers/test_wandb.py b/tests/tests_pytorch/loggers/test_wandb.py index c19b2b1146739..48162e6d9d2e2 100644 --- a/tests/tests_pytorch/loggers/test_wandb.py +++ b/tests/tests_pytorch/loggers/test_wandb.py @@ -113,7 +113,6 @@ def name(self): return "the_run_name" with mock.patch("pytorch_lightning.loggers.wandb.Run", new=Experiment): - wandb.run = None wandb.init.return_value = Experiment() logger = WandbLogger(id="the_id", offline=True) @@ -129,11 +128,11 @@ def name(self): assert trainer2.logger.__class__.__name__ == WandbLogger.__name__ assert trainer2.logger.experiment, "missing experiment" - wandb.init.assert_called() - assert "id" in wandb.init.call_args[1] - assert wandb.init.call_args[1]["id"] == "the_id" + wandb.init.assert_called() + assert "id" in wandb.init.call_args[1] + assert wandb.init.call_args[1]["id"] == "the_id" - del os.environ["WANDB_MODE"] + del os.environ["WANDB_MODE"] @mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock) From ec8df7533872b0cd1f6ae70f4b5ee0839264eb16 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 1 Jul 2022 15:05:30 +0000 Subject: [PATCH 08/14] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- tests/tests_pytorch/loggers/test_all.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/tests_pytorch/loggers/test_all.py b/tests/tests_pytorch/loggers/test_all.py index 94923dd0faf6f..d613296abccf5 100644 --- 
a/tests/tests_pytorch/loggers/test_all.py +++ b/tests/tests_pytorch/loggers/test_all.py @@ -364,7 +364,9 @@ def test_logger_with_prefix_all(tmpdir, monkeypatch): logger.experiment.add_scalar.assert_called_once_with("tmp-test", 1.0, 0) # WandB - with mock.patch("pytorch_lightning.loggers.wandb.wandb") as wandb, mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock): + with mock.patch("pytorch_lightning.loggers.wandb.wandb") as wandb, mock.patch( + "pytorch_lightning.loggers.wandb.Run", new=mock.Mock + ): logger = _instantiate_logger(WandbLogger, save_dir=tmpdir, prefix=prefix) wandb.run = None wandb.init().step = 0 From 77885c5554b6e08cc46ddacffe8364fcc23176f5 Mon Sep 17 00:00:00 2001 From: Gautier Dagan Date: Fri, 1 Jul 2022 16:12:51 +0100 Subject: [PATCH 09/14] minor upgrade jsonargparse to 4.10.2 --- requirements/pytorch/extra.txt | 2 +- src/pytorch_lightning/loggers/wandb.py | 4 ++-- src/pytorch_lightning/utilities/cli.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/pytorch/extra.txt b/requirements/pytorch/extra.txt index 721b9c3b6a6d5..1c1c5e97c375a 100644 --- a/requirements/pytorch/extra.txt +++ b/requirements/pytorch/extra.txt @@ -3,6 +3,6 @@ matplotlib>3.1, <3.5.3 torchtext>=0.10.*, <=0.12.0 omegaconf>=2.0.5, <2.3.0 hydra-core>=1.0.5, <1.3.0 -jsonargparse[signatures]>=4.10.0, <=4.10.0 +jsonargparse[signatures]>=4.10.2, <=4.10.2 gcsfs>=2021.5.0, <2022.6.0 rich>=10.2.2, !=10.15.0.a, <13.0.0 diff --git a/src/pytorch_lightning/loggers/wandb.py b/src/pytorch_lightning/loggers/wandb.py index ba02c570e197f..debb663a9087e 100644 --- a/src/pytorch_lightning/loggers/wandb.py +++ b/src/pytorch_lightning/loggers/wandb.py @@ -292,7 +292,7 @@ def __init__( self._logged_model_time: Dict[str, float] = {} self._checkpoint_callback: Optional["ReferenceType[Checkpoint]"] = None # set wandb init arguments - self._wandb_init = dict( + self._wandb_init: Dict[str, Any] = dict( name=name or project, project=project, id=version or id, @@ -354,7 +354,7 @@ def experiment(self) -> Run: self._experiment = wandb._attach(attach_id) else: # create new wandb process - self._experiment = wandb.init(**self._wandb_init) # type: ignore + self._experiment = wandb.init(**self._wandb_init) # define default x-axis if isinstance(self._experiment, Run) and getattr(self._experiment, "define_metric", None): diff --git a/src/pytorch_lightning/utilities/cli.py b/src/pytorch_lightning/utilities/cli.py index f9d3375a6c6d8..a66cd6c0899cd 100644 --- a/src/pytorch_lightning/utilities/cli.py +++ b/src/pytorch_lightning/utilities/cli.py @@ -31,7 +31,7 @@ from pytorch_lightning.utilities.model_helpers import is_overridden from pytorch_lightning.utilities.rank_zero import _warn, rank_zero_deprecation, rank_zero_warn -_JSONARGPARSE_SIGNATURES_AVAILABLE = _RequirementAvailable("jsonargparse[signatures]>=4.10.0") +_JSONARGPARSE_SIGNATURES_AVAILABLE = _RequirementAvailable("jsonargparse[signatures]>=4.10.2") if _JSONARGPARSE_SIGNATURES_AVAILABLE: import docstring_parser From aee3cf404c800d4c4a71fbb5aa9acea04c5d1678 Mon Sep 17 00:00:00 2001 From: Gautier Dagan Date: Fri, 1 Jul 2022 16:22:15 +0100 Subject: [PATCH 10/14] specify type ignore[misc] --- src/pytorch_lightning/loggers/wandb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pytorch_lightning/loggers/wandb.py b/src/pytorch_lightning/loggers/wandb.py index debb663a9087e..78c87112916ba 100644 --- a/src/pytorch_lightning/loggers/wandb.py +++ b/src/pytorch_lightning/loggers/wandb.py @@ -322,7 +322,7 @@ def 
__getstate__(self) -> Dict[str, Any]: state["_experiment"] = None return state - @property # type: ignore + @property # type: ignore[misc] @rank_zero_experiment def experiment(self) -> Run: r""" From 91bb25dcdc086fbb3012ae2f0aefd00f892f3a2b Mon Sep 17 00:00:00 2001 From: Rohit Gupta Date: Thu, 14 Jul 2022 17:26:09 +0530 Subject: [PATCH 11/14] Apply suggestions from code review Co-authored-by: Akihiro Nitta --- src/pytorch_lightning/loggers/wandb.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/pytorch_lightning/loggers/wandb.py b/src/pytorch_lightning/loggers/wandb.py index 78c87112916ba..dffc965ef801c 100644 --- a/src/pytorch_lightning/loggers/wandb.py +++ b/src/pytorch_lightning/loggers/wandb.py @@ -263,7 +263,7 @@ def __init__( agg_key_funcs: Optional[Mapping[str, Callable[[Sequence[float]], float]]] = None, agg_default_func: Optional[Callable[[Sequence[float]], float]] = None, **kwargs: Any, - ): + ) -> None: if wandb is None: raise ModuleNotFoundError( "You want to use `wandb` logger which is not installed yet," @@ -380,7 +380,8 @@ def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) -> metrics = _add_prefix(metrics, self._prefix, self.LOGGER_JOIN_CHAR) if step is not None: - self.experiment.log(dict(metrics, **{"trainer/global_step": step})) + metrics["trainer/global_step"] = step + self.experiment.log(**metrics) else: self.experiment.log(metrics) From b548033321ee5be123133918b6d4d9b6c0015f6b Mon Sep 17 00:00:00 2001 From: Rohit Gupta Date: Thu, 14 Jul 2022 22:29:41 +0530 Subject: [PATCH 12/14] rev --- src/pytorch_lightning/loggers/wandb.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/pytorch_lightning/loggers/wandb.py b/src/pytorch_lightning/loggers/wandb.py index dffc965ef801c..53fbd2b1097f8 100644 --- a/src/pytorch_lightning/loggers/wandb.py +++ b/src/pytorch_lightning/loggers/wandb.py @@ -298,7 +298,7 @@ def __init__( id=version or id, dir=save_dir, resume="allow", - anonymous="allow" if anonymous else None, + anonymous=("allow" if anonymous else None), ) self._wandb_init.update(**kwargs) # extract parameters @@ -380,8 +380,7 @@ def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) -> metrics = _add_prefix(metrics, self._prefix, self.LOGGER_JOIN_CHAR) if step is not None: - metrics["trainer/global_step"] = step - self.experiment.log(**metrics) + self.experiment.log(dict(metrics, **{"trainer/global_step": step})) else: self.experiment.log(metrics) From b2386cc07ec70f1fe4dbc63bde31a843a8cb6cff Mon Sep 17 00:00:00 2001 From: Gautier Dagan Date: Tue, 19 Jul 2022 11:41:30 +0200 Subject: [PATCH 13/14] upgrade wandb to 0.10.22 --- environment.yml | 2 +- requirements/pytorch/loggers.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/environment.yml b/environment.yml index d6f885f00bc72..f26e93031770e 100644 --- a/environment.yml +++ b/environment.yml @@ -50,5 +50,5 @@ dependencies: - test-tube>=0.7.5 - mlflow>=1.0.0 - comet_ml>=3.1.12 - - wandb>=0.8.21 + - wandb>=0.10.22 - neptune-client>=0.10.0 diff --git a/requirements/pytorch/loggers.txt b/requirements/pytorch/loggers.txt index 2abcb4b2df31f..a857ab5660d54 100644 --- a/requirements/pytorch/loggers.txt +++ b/requirements/pytorch/loggers.txt @@ -4,4 +4,4 @@ neptune-client>=0.10.0, <0.16.4 comet-ml>=3.1.12, <3.31.6 mlflow>=1.0.0, <1.27.0 test_tube>=0.7.5, <=0.7.5 -wandb>=0.8.21, <0.12.20 +wandb>=0.10.22, <0.12.20 From ae4eaf19f9fc9eb200de34af9920b1dfa6b7b565 Mon Sep 17 00:00:00 2001 From: otaj Date: Wed, 20 Jul 
2022 18:27:15 +0200 Subject: [PATCH 14/14] empty
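[Note on the log_metrics changes in PATCH 02, 11 and 12] The form that survives builds a fresh dict and passes it positionally: wandb's `Run.log` takes the metrics as a single dict argument, so the short-lived review suggestion `self.experiment.log(**metrics)` would have splatted the metric names as keyword arguments, and `metrics["trainer/global_step"] = step` would have mutated the caller's mapping. A small sketch of the retained behavior, using a hypothetical `FakeExperiment` in place of a real wandb run:

    from typing import Dict, Optional


    class FakeExperiment:
        """Stand-in for a wandb Run; only the log() shape matters here."""

        def log(self, data: Dict[str, float]) -> None:
            print(data)


    def log_metrics(experiment: FakeExperiment, metrics: Dict[str, float], step: Optional[int] = None) -> None:
        if step is not None:
            # dict(metrics, **{...}) copies, so the caller's dict is untouched
            # and a non-identifier key like "trainer/global_step" still works.
            experiment.log(dict(metrics, **{"trainer/global_step": step}))
        else:
            experiment.log(metrics)


    metrics = {"loss": 0.5}
    log_metrics(FakeExperiment(), metrics, step=3)  # {'loss': 0.5, 'trainer/global_step': 3}
    assert metrics == {"loss": 0.5}  # original mapping was not mutated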