diff --git a/pytorch_lightning/callbacks/gradient_accumulation_scheduler.py b/pytorch_lightning/callbacks/gradient_accumulation_scheduler.py
index fcb9b152d8333..00d73392c6ede 100644
--- a/pytorch_lightning/callbacks/gradient_accumulation_scheduler.py
+++ b/pytorch_lightning/callbacks/gradient_accumulation_scheduler.py
@@ -14,9 +14,9 @@ class GradientAccumulationScheduler(Callback):
     Change gradient accumulation factor according to scheduling.
 
     Args:
-        scheduling (dict): scheduling in format {epoch: accumulation_factor}
-            .. warning:: Epochs indexing starts from "1" until v0.6.x, but will start from "0" in
-                v0.8.0.
+        scheduling: scheduling in format {epoch: accumulation_factor}
+            .. warning:: Epochs indexing starts from "1" until v0.6.x,
+                but will start from "0" in v0.8.0.
 
     Example::
diff --git a/pytorch_lightning/core/model_saving.py b/pytorch_lightning/core/model_saving.py
index 278e6467dc1d2..54f8fbc4c83be 100644
--- a/pytorch_lightning/core/model_saving.py
+++ b/pytorch_lightning/core/model_saving.py
@@ -8,4 +8,4 @@
 warnings.warn("`model_saving` module has been renamed to `saving` since v0.6.0."
               " The deprecated module name will be removed in v0.8.0.", DeprecationWarning)
 
-from pytorch_lightning.core.saving import ModelIO  # noqa: E402
+from pytorch_lightning.core.saving import *  # noqa: F403
diff --git a/pytorch_lightning/core/root_module.py b/pytorch_lightning/core/root_module.py
index 07f290492902c..af9e89d4105c4 100644
--- a/pytorch_lightning/core/root_module.py
+++ b/pytorch_lightning/core/root_module.py
@@ -5,5 +5,7 @@
 
 import warnings
 
+from pytorch_lightning.core.lightning import *  # noqa: F403
+
 warnings.warn("`root_module` module has been renamed to `lightning` since v0.6.0."
               " The deprecated module name will be removed in v0.8.0.", DeprecationWarning)
diff --git a/pytorch_lightning/loggers/comet.py b/pytorch_lightning/loggers/comet.py
index 4399fa802e022..a7e8118869ce5 100644
--- a/pytorch_lightning/loggers/comet.py
+++ b/pytorch_lightning/loggers/comet.py
@@ -5,8 +5,9 @@
 CometLogger
 -------------
 """
+
+import logging as log
 from argparse import Namespace
-from logging import getLogger
 from typing import Optional, Dict, Union, Any
 
 try:
@@ -29,8 +30,6 @@
 from pytorch_lightning.utilities.debugging import MisconfigurationException
 from .base import LightningLoggerBase, rank_zero_only
 
-logger = getLogger(__name__)
-
 
 class CometLogger(LightningLoggerBase):
     r"""
@@ -99,7 +98,7 @@ def __init__(self, api_key: Optional[str] = None, save_dir: Optional[str] = None
             # If neither api_key nor save_dir are passed as arguments, raise an exception
             raise MisconfigurationException("CometLogger requires either api_key or save_dir during initialization.")
 
-        logger.info(f"CometLogger will be initialized in {self.mode} mode")
+        log.info(f"CometLogger will be initialized in {self.mode} mode")
 
         self.workspace = workspace
         self.project_name = project_name
@@ -118,7 +117,7 @@ def __init__(self, api_key: Optional[str] = None, save_dir: Optional[str] = None
         try:
             self.name = experiment_name
         except TypeError as e:
-            logger.exception("Failed to set experiment name for comet.ml logger")
+            log.exception("Failed to set experiment name for comet.ml logger")
 
     @property
     def experiment(self) -> CometBaseExperiment:
diff --git a/pytorch_lightning/loggers/mlflow.py b/pytorch_lightning/loggers/mlflow.py
index 7f05da9227bbe..ed878a0616e62 100644
--- a/pytorch_lightning/loggers/mlflow.py
+++ b/pytorch_lightning/loggers/mlflow.py
@@ -23,8 +23,8 @@ def any_lightning_module_function_or_hook(...):
         self.logger.experiment.whatever_ml_flow_supports(...)
 
 """
+import logging as log
 from argparse import Namespace
-from logging import getLogger
 from time import time
 from typing import Optional, Dict, Any, Union
 
@@ -36,8 +36,6 @@ def any_lightning_module_function_or_hook(...):
 
 from .base import LightningLoggerBase, rank_zero_only
 
-logger = getLogger(__name__)
-
 
 class MLFlowLogger(LightningLoggerBase):
     def __init__(self, experiment_name: str, tracking_uri: Optional[str] = None,
@@ -80,7 +78,7 @@ def run_id(self):
         if expt:
             self._expt_id = expt.experiment_id
         else:
-            logger.warning(f'Experiment with name {self.experiment_name} not found. Creating it.')
+            log.warning(f'Experiment with name {self.experiment_name} not found. Creating it.')
             self._expt_id = self._mlflow_client.create_experiment(name=self.experiment_name)
 
         run = self._mlflow_client.create_run(experiment_id=self._expt_id, tags=self.tags)
@@ -98,7 +96,7 @@ def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) ->
         timestamp_ms = int(time() * 1000)
         for k, v in metrics.items():
             if isinstance(v, str):
-                logger.warning(f'Discarding metric with string value {k}={v}.')
+                log.warning(f'Discarding metric with string value {k}={v}.')
                 continue
             self.experiment.log_metric(self.run_id, k, v, timestamp_ms, step)
diff --git a/pytorch_lightning/loggers/neptune.py b/pytorch_lightning/loggers/neptune.py
index b02f175f2d5f5..5e011d7426424 100644
--- a/pytorch_lightning/loggers/neptune.py
+++ b/pytorch_lightning/loggers/neptune.py
@@ -6,8 +6,8 @@
 NeptuneLogger
 --------------
 """
+import logging as log
 from argparse import Namespace
-from logging import getLogger
 from typing import Optional, List, Dict, Any, Union, Iterable
 
 try:
@@ -22,8 +22,6 @@
 
 from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_only
 
-logger = getLogger(__name__)
-
 
 class NeptuneLogger(LightningLoggerBase):
     r"""
@@ -138,7 +136,7 @@ def any_lightning_module_function_or_hook(...):
             neptune.init(api_token=self.api_key,
                          project_qualified_name=self.project_name)
 
-        logger.info(f'NeptuneLogger was initialized in {self.mode} mode')
+        log.info(f'NeptuneLogger was initialized in {self.mode} mode')
 
     @property
     def experiment(self) -> Experiment:
diff --git a/pytorch_lightning/logging/comet.py b/pytorch_lightning/logging/comet.py
index d3397f17e0ed7..48a426dd4d53e 100644
--- a/pytorch_lightning/logging/comet.py
+++ b/pytorch_lightning/logging/comet.py
@@ -2,4 +2,4 @@
 .. warning:: `logging` package has been renamed to `loggers` since v0.7.0 and will be removed in v0.9.0
 """
-from pytorch_lightning.loggers import comet  # noqa: F403
+from pytorch_lightning.loggers.comet import CometLogger  # noqa: F401
diff --git a/pytorch_lightning/loggers/comet_logger.py b/pytorch_lightning/logging/comet_logger.py
similarity index 100%
rename from pytorch_lightning/loggers/comet_logger.py
rename to pytorch_lightning/logging/comet_logger.py
diff --git a/pytorch_lightning/logging/mlflow.py b/pytorch_lightning/logging/mlflow.py
index 4d16e4184f2e8..895f41fc5175a 100644
--- a/pytorch_lightning/logging/mlflow.py
+++ b/pytorch_lightning/logging/mlflow.py
@@ -2,4 +2,4 @@
 .. warning:: `logging` package has been renamed to `loggers` since v0.7.0 and will be removed in v0.9.0
 """
-from pytorch_lightning.loggers import mlflow  # noqa: F403
+from pytorch_lightning.loggers.mlflow import MLFlowLogger  # noqa: F401
diff --git a/pytorch_lightning/loggers/mlflow_logger.py b/pytorch_lightning/logging/mlflow_logger.py
similarity index 100%
rename from pytorch_lightning/loggers/mlflow_logger.py
rename to pytorch_lightning/logging/mlflow_logger.py
diff --git a/pytorch_lightning/logging/neptune.py b/pytorch_lightning/logging/neptune.py
index a952daed5b421..f1b64525fe160 100644
--- a/pytorch_lightning/logging/neptune.py
+++ b/pytorch_lightning/logging/neptune.py
@@ -2,4 +2,4 @@
 .. warning:: `logging` package has been renamed to `loggers` since v0.7.0 and will be removed in v0.9.0
 """
-from pytorch_lightning.loggers import neptune  # noqa: F403
+from pytorch_lightning.loggers.neptune import NeptuneLogger  # noqa: F401
diff --git a/pytorch_lightning/logging/test_tube.py b/pytorch_lightning/logging/test_tube.py
index f3494c130f827..a9bc71e4885dd 100644
--- a/pytorch_lightning/logging/test_tube.py
+++ b/pytorch_lightning/logging/test_tube.py
@@ -2,4 +2,4 @@
 .. warning:: `logging` package has been renamed to `loggers` since v0.7.0 and will be removed in v0.9.0
 """
-from pytorch_lightning.loggers import test_tube  # noqa: F403
+from pytorch_lightning.loggers.test_tube import TestTubeLogger  # noqa: F401
diff --git a/pytorch_lightning/loggers/test_tube_logger.py b/pytorch_lightning/logging/test_tube_logger.py
similarity index 100%
rename from pytorch_lightning/loggers/test_tube_logger.py
rename to pytorch_lightning/logging/test_tube_logger.py
diff --git a/pytorch_lightning/logging/wandb.py b/pytorch_lightning/logging/wandb.py
index bc112b1d9dbef..e4527b7b8734a 100644
--- a/pytorch_lightning/logging/wandb.py
+++ b/pytorch_lightning/logging/wandb.py
@@ -2,4 +2,4 @@
 .. warning:: `logging` package has been renamed to `loggers` since v0.7.0 and will be removed in v0.9.0
 """
-from pytorch_lightning.loggers import wandb  # noqa: F403
+from pytorch_lightning.loggers.wandb import WandbLogger  # noqa: F401
diff --git a/pytorch_lightning/pt_overrides/__init__.py b/pytorch_lightning/pt_overrides/__init__.py
index 9db26c1e9058b..b68986d556ccc 100644
--- a/pytorch_lightning/pt_overrides/__init__.py
+++ b/pytorch_lightning/pt_overrides/__init__.py
@@ -7,5 +7,3 @@
 
 warnings.warn("`pt_overrides` package has been renamed to `overrides` since v0.6.0."
               " The deprecated module name will be removed in v0.8.0.", DeprecationWarning)
-
-from pytorch_lightning.overrides import override_data_parallel  # noqa: E402
diff --git a/pytorch_lightning/pt_overrides/override_data_parallel.py b/pytorch_lightning/pt_overrides/override_data_parallel.py
new file mode 100644
index 0000000000000..bc435b7d0738e
--- /dev/null
+++ b/pytorch_lightning/pt_overrides/override_data_parallel.py
@@ -0,0 +1,12 @@
+"""
+.. warning:: `override_data_parallel` module has been renamed to `data_parallel` since v0.6.0.
+    The deprecated module name will be removed in v0.8.0.
+"""
+
+import warnings
+
+warnings.warn("`override_data_parallel` module has been renamed to `data_parallel` since v0.6.0."
+ " The deprecated module name will be removed in v0.8.0.", DeprecationWarning) + +from pytorch_lightning.overrides.data_parallel import ( # noqa: F402 + get_a_var, parallel_apply, LightningDataParallel, LightningDistributedDataParallel) diff --git a/pytorch_lightning/root_module/decorators.py b/pytorch_lightning/root_module/decorators.py new file mode 100644 index 0000000000000..88afe093b97d6 --- /dev/null +++ b/pytorch_lightning/root_module/decorators.py @@ -0,0 +1,11 @@ +""" +.. warning:: `root_module.decorators` module has been renamed to `core.decorators` since v0.6.0. + The deprecated module name will be removed in v0.8.0. +""" + +import warnings + +warnings.warn("`root_module.decorators` module has been renamed to `core.decorators` since v0.6.0." + " The deprecated module name will be removed in v0.8.0.", DeprecationWarning) + +from pytorch_lightning.core.decorators import * # noqa: F403 diff --git a/pytorch_lightning/root_module/grads.py b/pytorch_lightning/root_module/grads.py new file mode 100644 index 0000000000000..1f9617385e9ef --- /dev/null +++ b/pytorch_lightning/root_module/grads.py @@ -0,0 +1,11 @@ +""" +.. warning:: `root_module.grads` module has been renamed to `core.grads` since v0.6.0. + The deprecated module name will be removed in v0.8.0. +""" + +import warnings + +warnings.warn("`root_module.grads` module has been renamed to `core.grads` since v0.6.0." + " The deprecated module name will be removed in v0.8.0.", DeprecationWarning) + +from pytorch_lightning.core.grads import * # noqa: F403 diff --git a/pytorch_lightning/root_module/hooks.py b/pytorch_lightning/root_module/hooks.py new file mode 100644 index 0000000000000..e4beaee999c7f --- /dev/null +++ b/pytorch_lightning/root_module/hooks.py @@ -0,0 +1,11 @@ +""" +.. warning:: `root_module.hooks` module has been renamed to `core.hooks` since v0.6.0. + The deprecated module name will be removed in v0.8.0. +""" + +import warnings + +warnings.warn("`root_module.hooks` module has been renamed to `core.hooks` since v0.6.0." + " The deprecated module name will be removed in v0.8.0.", DeprecationWarning) + +from pytorch_lightning.core.hooks import * # noqa: F403 diff --git a/pytorch_lightning/root_module/memory.py b/pytorch_lightning/root_module/memory.py new file mode 100644 index 0000000000000..ef739ac22f183 --- /dev/null +++ b/pytorch_lightning/root_module/memory.py @@ -0,0 +1,11 @@ +""" +.. warning:: `root_module.memory` module has been renamed to `core.memory` since v0.6.0. + The deprecated module name will be removed in v0.8.0. +""" + +import warnings + +warnings.warn("`root_module.memory` module has been renamed to `core.memory` since v0.6.0." + " The deprecated module name will be removed in v0.8.0.", DeprecationWarning) + +from pytorch_lightning.core.memory import * # noqa: F403 diff --git a/pytorch_lightning/root_module/model_saving.py b/pytorch_lightning/root_module/model_saving.py new file mode 100644 index 0000000000000..5af97abf2d6b6 --- /dev/null +++ b/pytorch_lightning/root_module/model_saving.py @@ -0,0 +1,11 @@ +""" +.. warning:: `root_module.model_saving` module has been renamed to `core.saving` since v0.6.0. + The deprecated module name will be removed in v0.8.0. +""" + +import warnings + +warnings.warn("`root_module.model_saving` module has been renamed to `core.saving` since v0.6.0." 
+ " The deprecated module name will be removed in v0.8.0.", DeprecationWarning) + +from pytorch_lightning.core.saving import * # noqa: F403 diff --git a/pytorch_lightning/root_module/root_module.py b/pytorch_lightning/root_module/root_module.py new file mode 100644 index 0000000000000..4dd472478e7b3 --- /dev/null +++ b/pytorch_lightning/root_module/root_module.py @@ -0,0 +1,11 @@ +""" +.. warning:: `root_module.root_module` module has been renamed to `core.lightning` since v0.6.0. + The deprecated module name will be removed in v0.8.0. +""" + +import warnings + +warnings.warn("`root_module.root_module` module has been renamed to `core.lightning` since v0.6.0." + " The deprecated module name will be removed in v0.8.0.", DeprecationWarning) + +from pytorch_lightning.core.lightning import * # noqa: F403 diff --git a/pytorch_lightning/trainer/__init__.py b/pytorch_lightning/trainer/__init__.py index f2e0125b06271..f8470b0ec7e90 100644 --- a/pytorch_lightning/trainer/__init__.py +++ b/pytorch_lightning/trainer/__init__.py @@ -423,7 +423,7 @@ def on_train_end(self): min_nb_epochs: .. warning:: deprecated:: 0.5.0 - Use `min_nb_epochs` instead. Will remove 0.8.0. + Use `min_epochs` instead. Will remove 0.8.0. max_steps ^^^^^^^^^ diff --git a/pytorch_lightning/trainer/deprecated_api.py b/pytorch_lightning/trainer/deprecated_api.py new file mode 100644 index 0000000000000..08f5e449eabc4 --- /dev/null +++ b/pytorch_lightning/trainer/deprecated_api.py @@ -0,0 +1,87 @@ +"""Mirroring deprecated API""" + +import warnings +from abc import ABC + + +class TrainerDeprecatedAPITillVer0_8(ABC): + + def __init__(self): + super().__init__() # mixin calls super too + + @property + def nb_gpu_nodes(self): + """Back compatibility, will be removed in v0.8.0""" + warnings.warn("Attribute `nb_gpu_nodes` has renamed to `num_nodes` since v0.5.0" + " and this method will be removed in v0.8.0", DeprecationWarning) + return self.num_nodes + + @property + def num_gpu_nodes(self): + """Back compatibility, will be removed in v0.8.0""" + warnings.warn("Attribute `num_gpu_nodes` has renamed to `num_nodes` since v0.5.0" + " and this method will be removed in v0.8.0", DeprecationWarning) + return self.num_nodes + + @num_gpu_nodes.setter + def num_gpu_nodes(self, num_nodes): + """Back compatibility, will be removed in v0.8.0""" + warnings.warn("Attribute `num_gpu_nodes` has renamed to `num_nodes` since v0.5.0" + " and this method will be removed in v0.8.0", DeprecationWarning) + self.num_nodes = num_nodes + + @property + def gradient_clip(self): + """Back compatibility, will be removed in v0.8.0""" + warnings.warn("Attribute `gradient_clip` has renamed to `gradient_clip_val` since v0.5.0" + " and this method will be removed in v0.8.0", DeprecationWarning) + return self.gradient_clip_val + + @gradient_clip.setter + def gradient_clip(self, gradient_clip): + """Back compatibility, will be removed in v0.8.0""" + warnings.warn("Attribute `gradient_clip` has renamed to `gradient_clip_val` since v0.5.0" + " and this method will be removed in v0.8.0", DeprecationWarning) + self.gradient_clip_val = gradient_clip + + @property + def max_nb_epochs(self): + """Back compatibility, will be removed in v0.8.0""" + warnings.warn("Attribute `max_nb_epochs` has renamed to `max_epochs` since v0.5.0" + " and this method will be removed in v0.8.0", DeprecationWarning) + return self.max_epochs + + @max_nb_epochs.setter + def max_nb_epochs(self, max_epochs): + """Back compatibility, will be removed in v0.8.0""" + warnings.warn("Attribute `max_nb_epochs` 
+        warnings.warn("Attribute `max_nb_epochs` has been renamed to `max_epochs` since v0.5.0"
+                      " and this attribute will be removed in v0.8.0", DeprecationWarning)
+        self.max_epochs = max_epochs
+
+    @property
+    def min_nb_epochs(self):
+        """Back compatibility, will be removed in v0.8.0"""
+        warnings.warn("Attribute `min_nb_epochs` has been renamed to `min_epochs` since v0.5.0"
+                      " and this attribute will be removed in v0.8.0", DeprecationWarning)
+        return self.min_epochs
+
+    @min_nb_epochs.setter
+    def min_nb_epochs(self, min_epochs):
+        """Back compatibility, will be removed in v0.8.0"""
+        warnings.warn("Attribute `min_nb_epochs` has been renamed to `min_epochs` since v0.5.0"
+                      " and this attribute will be removed in v0.8.0", DeprecationWarning)
+        self.min_epochs = min_epochs
+
+    @property
+    def nb_sanity_val_steps(self):
+        """Back compatibility, will be removed in v0.8.0"""
+        warnings.warn("Attribute `nb_sanity_val_steps` has been renamed to `num_sanity_val_steps` since v0.5.0"
+                      " and this attribute will be removed in v0.8.0", DeprecationWarning)
+        return self.num_sanity_val_steps
+
+    @nb_sanity_val_steps.setter
+    def nb_sanity_val_steps(self, nb):
+        """Back compatibility, will be removed in v0.8.0"""
+        warnings.warn("Attribute `nb_sanity_val_steps` has been renamed to `num_sanity_val_steps` since v0.5.0"
+                      " and this attribute will be removed in v0.8.0", DeprecationWarning)
+        self.num_sanity_val_steps = nb
diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index a9bd4fbdcd78d..506ec531a5cfd 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -26,13 +26,14 @@
     determine_root_gpu_device
 )
 from pytorch_lightning.core.lightning import LightningModule
+from pytorch_lightning.trainer.callback_hook import TrainerCallbackHookMixin
+from pytorch_lightning.trainer.deprecated_api import TrainerDeprecatedAPITillVer0_8
 from pytorch_lightning.trainer.evaluation_loop import TrainerEvaluationLoopMixin
 from pytorch_lightning.trainer.logging import TrainerLoggingMixin
 from pytorch_lightning.trainer.model_hooks import TrainerModelHooksMixin
 from pytorch_lightning.trainer.training_io import TrainerIOMixin
 from pytorch_lightning.trainer.training_loop import TrainerTrainLoopMixin
 from pytorch_lightning.trainer.training_tricks import TrainerTrainingTricksMixin
-from pytorch_lightning.trainer.callback_hook import TrainerCallbackHookMixin
 from pytorch_lightning.utilities.debugging import MisconfigurationException
 from pytorch_lightning.profiler import Profiler, PassThroughProfiler
 from pytorch_lightning.callbacks import Callback
@@ -55,19 +56,21 @@
     XLA_AVAILABLE = True
 
 
-class Trainer(TrainerIOMixin,
-              TrainerDPMixin,
-              TrainerDDPMixin,
-              TrainerLoggingMixin,
-              TrainerModelHooksMixin,
-              TrainerTrainingTricksMixin,
-              TrainerDataLoadingMixin,
-              TrainerAMPMixin,
-              TrainerEvaluationLoopMixin,
-              TrainerTrainLoopMixin,
-              TrainerCallbackConfigMixin,
-              TrainerCallbackHookMixin
-              ):
+class Trainer(
+    TrainerIOMixin,
+    TrainerDPMixin,
+    TrainerDDPMixin,
+    TrainerLoggingMixin,
+    TrainerModelHooksMixin,
+    TrainerTrainingTricksMixin,
+    TrainerDataLoadingMixin,
+    TrainerAMPMixin,
+    TrainerEvaluationLoopMixin,
+    TrainerTrainLoopMixin,
+    TrainerCallbackConfigMixin,
+    TrainerCallbackHookMixin,
+    TrainerDeprecatedAPITillVer0_8,
+):
 
     def __init__(
             self,
@@ -105,7 +108,7 @@ def __init__(
             row_log_interval: int = 10,
             add_row_log_interval=None,  # backward compatible, todo: remove in v0.8.0
             distributed_backend: Optional[str] = None,
-            use_amp=False,  # backward compatible, todo: remove in v0.8.0
+            use_amp=False,  # backward compatible, todo: remove in v0.9.0
             precision: int = 32,
             print_nan_grads: bool = False,
             weights_summary: str = 'full',
@@ -202,7 +205,7 @@ def __init__(
             use_amp:
                 .. warning:: .. deprecated:: 0.7.0
-                    Use `precision` instead. Will remove 0.8.0.
+                    Use `precision` instead. Will be removed in 0.9.0.
 
             precision: Full precision (32), half precision (16).
@@ -241,23 +244,20 @@ def __init__(
             torch.backends.cudnn.benchmark = True
 
         # Transfer params
-        # Backward compatibility
         self.num_nodes = num_nodes
+        # Backward compatibility, TODO: remove in v0.8.0
         if nb_gpu_nodes is not None:
-            warnings.warn("`nb_gpu_nodes` has renamed to `num_nodes` since v0.5.0"
+            warnings.warn("Argument `nb_gpu_nodes` has been renamed to `num_nodes` since v0.5.0"
                           " and this method will be removed in v0.8.0", DeprecationWarning)
-            if not num_nodes:  # in case you did not set the proper value
-                num_nodes = nb_gpu_nodes
-        self.num_gpu_nodes = num_nodes
+            self.num_gpu_nodes = nb_gpu_nodes
         self.log_gpu_memory = log_gpu_memory
 
-        # Backward compatibility
+        self.gradient_clip_val = gradient_clip_val
+        # Backward compatibility, TODO: remove in v0.8.0
         if gradient_clip is not None:
-            warnings.warn("`gradient_clip` has renamed to `gradient_clip_val` since v0.5.0"
+            warnings.warn("Argument `gradient_clip` has been renamed to `gradient_clip_val` since v0.5.0"
                           " and this method will be removed in v0.8.0", DeprecationWarning)
-            if not gradient_clip_val:  # in case you did not set the proper value
-                gradient_clip_val = gradient_clip
-        self.gradient_clip_val = gradient_clip_val
+            self.gradient_clip = gradient_clip
 
         self.reload_dataloaders_every_epoch = reload_dataloaders_every_epoch
         self.progress_bar_refresh_rate = progress_bar_refresh_rate
@@ -273,33 +273,29 @@ def __init__(
         self.process_position = process_position
         self.weights_summary = weights_summary
 
-        # Backward compatibility
+        self.max_epochs = max_epochs
+        # Backward compatibility, TODO: remove in v0.8.0
         if max_nb_epochs is not None:
-            warnings.warn("`max_nb_epochs` has renamed to `max_epochs` since v0.5.0"
+            warnings.warn("Argument `max_nb_epochs` has been renamed to `max_epochs` since v0.5.0"
                           " and this method will be removed in v0.8.0", DeprecationWarning)
-            if not max_epochs:  # in case you did not set the proper value
-                max_epochs = max_nb_epochs
-        self.max_epochs = max_epochs
+            self.max_nb_epochs = max_nb_epochs
 
-        # Backward compatibility
+        self.min_epochs = min_epochs
+        # Backward compatibility, TODO: remove in v0.8.0
         if min_nb_epochs is not None:
-            warnings.warn("`min_nb_epochs` has renamed to `min_epochs` since v0.5.0"
+            warnings.warn("Argument `min_nb_epochs` has been renamed to `min_epochs` since v0.5.0"
                           " and this method will be removed in v0.8.0", DeprecationWarning)
-            if not min_epochs:  # in case you did not set the proper value
-                min_epochs = min_nb_epochs
-        self.min_epochs = min_epochs
+            self.min_nb_epochs = min_nb_epochs
 
         self.max_steps = max_steps
         self.min_steps = min_steps
 
-        # Backward compatibility
+        self.num_sanity_val_steps = num_sanity_val_steps
+        # Backward compatibility, TODO: remove in v0.8.0
         if nb_sanity_val_steps is not None:
-            warnings.warn("`nb_sanity_val_steps` has renamed to `num_sanity_val_steps` since v0.5.0"
+            warnings.warn("Argument `nb_sanity_val_steps` has been renamed to `num_sanity_val_steps` since v0.5.0"
                           " and this method will be removed in v0.8.0", DeprecationWarning)
-            if not num_sanity_val_steps:  # in case you did not set the proper value
-                num_sanity_val_steps = nb_sanity_val_steps
-
-        self.num_sanity_val_steps = num_sanity_val_steps
+            self.nb_sanity_val_steps = nb_sanity_val_steps
 
         self.print_nan_grads = print_nan_grads
         self.truncated_bptt_steps = truncated_bptt_steps
         self.resume_from_checkpoint = resume_from_checkpoint
@@ -380,7 +376,7 @@ def __init__(
         self.use_dp = False
         self.single_gpu = False
         self.distributed_backend = distributed_backend
-        self.set_distributed_mode(distributed_backend, num_nodes)
+        self.set_distributed_mode(distributed_backend, self.num_nodes)
 
         # override dist backend when using tpus
         if self.on_tpu:
@@ -391,7 +387,7 @@ def __init__(
         self.proc_rank = 0
         self.world_size = 1
         self.node_rank = 0
-        self.configure_slurm_ddp(num_nodes)
+        self.configure_slurm_ddp(self.num_nodes)
 
         # nvidia setup
         self.set_nvidia_flags(self.is_slurm_managing_tasks, self.data_parallel_device_ids)
@@ -423,7 +419,7 @@ def __init__(
 
         assert self.precision in (16, 32), 'only 32 or 16 bit precision supported'
 
-        if self.precision == 16 and num_tpu_cores is None:
+        if self.precision == 16 and self.num_tpu_cores is None:
             use_amp = True
         self.init_amp(use_amp)
diff --git a/pytorch_lightning/trainer/training_loop.py b/pytorch_lightning/trainer/training_loop.py
index 4d650be42d591..bb02b90f10509 100644
--- a/pytorch_lightning/trainer/training_loop.py
+++ b/pytorch_lightning/trainer/training_loop.py
@@ -215,24 +215,6 @@ class TrainerTrainLoopMixin(ABC):
     on_epoch_end: Callable
     on_validation_end: Callable
 
-    @property
-    def max_nb_epochs(self):
-        """
-        .. warning:: `max_nb_epochs` is deprecated and will be removed in v0.8.0, use `max_epochs` instead.
-        """
-        warnings.warn("`max_nb_epochs` is deprecated and will be removed in "
-                      "v0.8.0, use `max_epochs` instead.", DeprecationWarning)
-        return self.max_epochs
-
-    @property
-    def min_nb_epochs(self):
-        """
-        .. warning:: `min_nb_epochs` is deprecated and will be removed in v0.8.0, use `min_epochs` instead.
- """ - warnings.warn("`min_nb_epochs` is deprecated and will be removed in " - "v0.8.0, use `min_epochs` instead.", DeprecationWarning) - return self.min_epochs - @abstractmethod def get_model(self): """Warning: this is just empty shell for code implemented in other class.""" diff --git a/tests/test_deprecated.py b/tests/test_deprecated.py new file mode 100644 index 0000000000000..b014c5a29bdaf --- /dev/null +++ b/tests/test_deprecated.py @@ -0,0 +1,55 @@ +"""Test deprecated functionality which will be removed in vX.Y.Z""" + +from pytorch_lightning import Trainer + + +def test_to_be_removed_in_v0_8_0_module_imports(): + from pytorch_lightning.logging.comet_logger import CometLogger # noqa: F811 + from pytorch_lightning.logging.mlflow_logger import MLFlowLogger # noqa: F811 + from pytorch_lightning.logging.test_tube_logger import TestTubeLogger # noqa: F811 + + from pytorch_lightning.pt_overrides.override_data_parallel import ( # noqa: F811 + LightningDataParallel, LightningDistributedDataParallel) + from pytorch_lightning.overrides.override_data_parallel import ( # noqa: F811 + LightningDataParallel, LightningDistributedDataParallel) + + from pytorch_lightning.core.model_saving import ModelIO # noqa: F811 + from pytorch_lightning.core.root_module import LightningModule # noqa: F811 + + from pytorch_lightning.root_module.decorators import data_loader # noqa: F811 + from pytorch_lightning.root_module.grads import GradInformation # noqa: F811 + from pytorch_lightning.root_module.hooks import ModelHooks # noqa: F811 + from pytorch_lightning.root_module.memory import ModelSummary # noqa: F811 + from pytorch_lightning.root_module.model_saving import ModelIO # noqa: F811 + from pytorch_lightning.root_module.root_module import LightningModule # noqa: F811 + + +def test_to_be_removed_in_v0_8_0_trainer(): + mapping_old_new = { + 'gradient_clip': 'gradient_clip_val', + 'nb_gpu_nodes': 'num_nodes', + 'max_nb_epochs': 'max_epochs', + 'min_nb_epochs': 'min_epochs', + 'nb_sanity_val_steps': 'num_sanity_val_steps', + } + # skip 0 since it may be interested as False + kwargs = {k: (i + 1) for i, k in enumerate(mapping_old_new)} + + trainer = Trainer(**kwargs) + + for attr_old in mapping_old_new: + attr_new = mapping_old_new[attr_old] + assert kwargs[attr_old] == getattr(trainer, attr_old), \ + 'Missing deprecated attribute "%s"' % attr_old + assert kwargs[attr_old] == getattr(trainer, attr_new), \ + 'Wrongly passed deprecated argument "%s" to attribute "%s"' % (attr_old, attr_new) + + +def test_to_be_removed_in_v0_9_0_module_imports(): + from pytorch_lightning.core.decorators import data_loader # noqa: F811 + + from pytorch_lightning.logging.comet import CometLogger # noqa: F402 + from pytorch_lightning.logging.mlflow import MLFlowLogger # noqa: F402 + from pytorch_lightning.logging.neptune import NeptuneLogger # noqa: F402 + from pytorch_lightning.logging.test_tube import TestTubeLogger # noqa: F402 + from pytorch_lightning.logging.wandb import WandbLogger # noqa: F402