Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,9 @@ class GradientAccumulationScheduler(Callback):
Change gradient accumulation factor according to scheduling.

Args:
scheduling (dict): scheduling in format {epoch: accumulation_factor}
.. warning:: Epochs indexing starts from "1" until v0.6.x, but will start from "0" in
v0.8.0.
scheduling: scheduling in format {epoch: accumulation_factor}
.. warning:: Epochs indexing starts from "1" until v0.6.x,
but will start from "0" in v0.8.0.

Example::

Expand Down
2 changes: 1 addition & 1 deletion pytorch_lightning/core/model_saving.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,4 +8,4 @@
warnings.warn("`model_saving` module has been renamed to `saving` since v0.6.0."
" The deprecated module name will be removed in v0.8.0.", DeprecationWarning)

from pytorch_lightning.core.saving import ModelIO # noqa: E402
from pytorch_lightning.core.saving import * # noqa: F403
2 changes: 2 additions & 0 deletions pytorch_lightning/core/root_module.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,5 +5,7 @@

import warnings

from pytorch_lightning.core.lightning import * # noqa: F403

warnings.warn("`root_module` module has been renamed to `lightning` since v0.6.0."
" The deprecated module name will be removed in v0.8.0.", DeprecationWarning)
9 changes: 4 additions & 5 deletions pytorch_lightning/loggers/comet.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,9 @@
CometLogger
-------------
"""

import logging as log
from argparse import Namespace
from logging import getLogger
from typing import Optional, Dict, Union, Any

try:
Expand All @@ -29,8 +30,6 @@
from pytorch_lightning.utilities.debugging import MisconfigurationException
from .base import LightningLoggerBase, rank_zero_only

logger = getLogger(__name__)


class CometLogger(LightningLoggerBase):
r"""
Expand Down Expand Up @@ -99,7 +98,7 @@ def __init__(self, api_key: Optional[str] = None, save_dir: Optional[str] = None
# If neither api_key nor save_dir are passed as arguments, raise an exception
raise MisconfigurationException("CometLogger requires either api_key or save_dir during initialization.")

logger.info(f"CometLogger will be initialized in {self.mode} mode")
log.info(f"CometLogger will be initialized in {self.mode} mode")

self.workspace = workspace
self.project_name = project_name
Expand All @@ -118,7 +117,7 @@ def __init__(self, api_key: Optional[str] = None, save_dir: Optional[str] = None
try:
self.name = experiment_name
except TypeError as e:
logger.exception("Failed to set experiment name for comet.ml logger")
log.exception("Failed to set experiment name for comet.ml logger")

@property
def experiment(self) -> CometBaseExperiment:
Expand Down
8 changes: 3 additions & 5 deletions pytorch_lightning/loggers/mlflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@ def any_lightning_module_function_or_hook(...):
self.logger.experiment.whatever_ml_flow_supports(...)

"""
import logging as log
from argparse import Namespace
from logging import getLogger
from time import time
from typing import Optional, Dict, Any, Union

Expand All @@ -36,8 +36,6 @@ def any_lightning_module_function_or_hook(...):

from .base import LightningLoggerBase, rank_zero_only

logger = getLogger(__name__)


class MLFlowLogger(LightningLoggerBase):
def __init__(self, experiment_name: str, tracking_uri: Optional[str] = None,
Expand Down Expand Up @@ -80,7 +78,7 @@ def run_id(self):
if expt:
self._expt_id = expt.experiment_id
else:
logger.warning(f'Experiment with name {self.experiment_name} not found. Creating it.')
log.warning(f'Experiment with name {self.experiment_name} not found. Creating it.')
self._expt_id = self._mlflow_client.create_experiment(name=self.experiment_name)

run = self._mlflow_client.create_run(experiment_id=self._expt_id, tags=self.tags)
Expand All @@ -98,7 +96,7 @@ def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) ->
timestamp_ms = int(time() * 1000)
for k, v in metrics.items():
if isinstance(v, str):
logger.warning(f'Discarding metric with string value {k}={v}.')
log.warning(f'Discarding metric with string value {k}={v}.')
continue
self.experiment.log_metric(self.run_id, k, v, timestamp_ms, step)

Expand Down
6 changes: 2 additions & 4 deletions pytorch_lightning/loggers/neptune.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@
NeptuneLogger
--------------
"""
import logging as log
from argparse import Namespace
from logging import getLogger
from typing import Optional, List, Dict, Any, Union, Iterable

try:
Expand All @@ -22,8 +22,6 @@

from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_only

logger = getLogger(__name__)


class NeptuneLogger(LightningLoggerBase):
r"""
Expand Down Expand Up @@ -138,7 +136,7 @@ def any_lightning_module_function_or_hook(...):
neptune.init(api_token=self.api_key,
project_qualified_name=self.project_name)

logger.info(f'NeptuneLogger was initialized in {self.mode} mode')
log.info(f'NeptuneLogger was initialized in {self.mode} mode')

@property
def experiment(self) -> Experiment:
Expand Down
2 changes: 1 addition & 1 deletion pytorch_lightning/logging/comet.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@
.. warning:: `logging` package has been renamed to `loggers` since v0.7.0 and will be removed in v0.9.0
"""

from pytorch_lightning.loggers import comet # noqa: F403
from pytorch_lightning.loggers.comet import CometLogger # noqa: F403
2 changes: 1 addition & 1 deletion pytorch_lightning/logging/mlflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@
.. warning:: `logging` package has been renamed to `loggers` since v0.7.0 and will be removed in v0.9.0
"""

from pytorch_lightning.loggers import mlflow # noqa: F403
from pytorch_lightning.loggers.mlflow import MLFlowLogger # noqa: F403
2 changes: 1 addition & 1 deletion pytorch_lightning/logging/neptune.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@
.. warning:: `logging` package has been renamed to `loggers` since v0.7.0 and will be removed in v0.9.0
"""

from pytorch_lightning.loggers import neptune # noqa: F403
from pytorch_lightning.loggers.neptune import NeptuneLogger # noqa: F403
2 changes: 1 addition & 1 deletion pytorch_lightning/logging/test_tube.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@
.. warning:: `logging` package has been renamed to `loggers` since v0.7.0 and will be removed in v0.9.0
"""

from pytorch_lightning.loggers import test_tube # noqa: F403
from pytorch_lightning.loggers.test_tube import TestTubeLogger # noqa: F403
2 changes: 1 addition & 1 deletion pytorch_lightning/logging/wandb.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@
.. warning:: `logging` package has been renamed to `loggers` since v0.7.0 and will be removed in v0.9.0
"""

from pytorch_lightning.loggers import wandb # noqa: F403
from pytorch_lightning.loggers.wandb import WandbLogger # noqa: F403
2 changes: 0 additions & 2 deletions pytorch_lightning/pt_overrides/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,5 +7,3 @@

warnings.warn("`pt_overrides` package has been renamed to `overrides` since v0.6.0."
" The deprecated module name will be removed in v0.8.0.", DeprecationWarning)

from pytorch_lightning.overrides import override_data_parallel # noqa: E402
12 changes: 12 additions & 0 deletions pytorch_lightning/pt_overrides/override_data_parallel.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
"""
.. warning:: `override_data_parallel` module has been renamed to `data_parallel` since v0.6.0.
The deprecated module name will be removed in v0.8.0.
"""

import warnings

warnings.warn("`override_data_parallel` module has been renamed to `data_parallel` since v0.6.0."
" The deprecated module name will be removed in v0.8.0.", DeprecationWarning)

from pytorch_lightning.overrides.data_parallel import ( # noqa: F402
get_a_var, parallel_apply, LightningDataParallel, LightningDistributedDataParallel)
11 changes: 11 additions & 0 deletions pytorch_lightning/root_module/decorators.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
"""
.. warning:: `root_module.decorators` module has been renamed to `core.decorators` since v0.6.0.
The deprecated module name will be removed in v0.8.0.
"""

import warnings

warnings.warn("`root_module.decorators` module has been renamed to `core.decorators` since v0.6.0."
" The deprecated module name will be removed in v0.8.0.", DeprecationWarning)

from pytorch_lightning.core.decorators import * # noqa: F403
11 changes: 11 additions & 0 deletions pytorch_lightning/root_module/grads.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
"""
.. warning:: `root_module.grads` module has been renamed to `core.grads` since v0.6.0.
The deprecated module name will be removed in v0.8.0.
"""

import warnings

warnings.warn("`root_module.grads` module has been renamed to `core.grads` since v0.6.0."
" The deprecated module name will be removed in v0.8.0.", DeprecationWarning)

from pytorch_lightning.core.grads import * # noqa: F403
11 changes: 11 additions & 0 deletions pytorch_lightning/root_module/hooks.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
"""
.. warning:: `root_module.hooks` module has been renamed to `core.hooks` since v0.6.0.
The deprecated module name will be removed in v0.8.0.
"""

import warnings

warnings.warn("`root_module.hooks` module has been renamed to `core.hooks` since v0.6.0."
" The deprecated module name will be removed in v0.8.0.", DeprecationWarning)

from pytorch_lightning.core.hooks import * # noqa: F403
11 changes: 11 additions & 0 deletions pytorch_lightning/root_module/memory.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
"""
.. warning:: `root_module.memory` module has been renamed to `core.memory` since v0.6.0.
The deprecated module name will be removed in v0.8.0.
"""

import warnings

warnings.warn("`root_module.memory` module has been renamed to `core.memory` since v0.6.0."
" The deprecated module name will be removed in v0.8.0.", DeprecationWarning)

from pytorch_lightning.core.memory import * # noqa: F403
11 changes: 11 additions & 0 deletions pytorch_lightning/root_module/model_saving.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
"""
.. warning:: `root_module.model_saving` module has been renamed to `core.saving` since v0.6.0.
The deprecated module name will be removed in v0.8.0.
"""

import warnings

warnings.warn("`root_module.model_saving` module has been renamed to `core.saving` since v0.6.0."
" The deprecated module name will be removed in v0.8.0.", DeprecationWarning)

from pytorch_lightning.core.saving import * # noqa: F403
11 changes: 11 additions & 0 deletions pytorch_lightning/root_module/root_module.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
"""
.. warning:: `root_module.root_module` module has been renamed to `core.lightning` since v0.6.0.
The deprecated module name will be removed in v0.8.0.
"""

import warnings

warnings.warn("`root_module.root_module` module has been renamed to `core.lightning` since v0.6.0."
" The deprecated module name will be removed in v0.8.0.", DeprecationWarning)

from pytorch_lightning.core.lightning import * # noqa: F403
2 changes: 1 addition & 1 deletion pytorch_lightning/trainer/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -423,7 +423,7 @@ def on_train_end(self):
min_nb_epochs:

.. warning:: deprecated:: 0.5.0
Use `min_nb_epochs` instead. Will remove 0.8.0.
Use `min_epochs` instead. Will remove 0.8.0.

max_steps
^^^^^^^^^
Expand Down
87 changes: 87 additions & 0 deletions pytorch_lightning/trainer/deprecated_api.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,87 @@
"""Mirroring deprecated API"""

import warnings
from abc import ABC


class TrainerDeprecatedAPITillVer0_8(ABC):
    """Mixin exposing renamed ``Trainer`` attributes under their old names.

    Each property forwards reads (and, where a setter existed historically,
    writes) to the renamed attribute on ``self`` and emits a
    ``DeprecationWarning``. The whole mixin is scheduled for removal in v0.8.0.

    Fix over previous revision: warning messages read "has renamed to", which
    was ungrammatical and inconsistent with the other deprecation messages in
    this package ("has been renamed to").
    """

    def __init__(self):
        super().__init__()  # mixin calls super too

    @property
    def nb_gpu_nodes(self):
        """Back compatibility, will be removed in v0.8.0"""
        warnings.warn("Attribute `nb_gpu_nodes` has been renamed to `num_nodes` since v0.5.0"
                      " and this method will be removed in v0.8.0", DeprecationWarning)
        return self.num_nodes

    @property
    def num_gpu_nodes(self):
        """Back compatibility, will be removed in v0.8.0"""
        warnings.warn("Attribute `num_gpu_nodes` has been renamed to `num_nodes` since v0.5.0"
                      " and this method will be removed in v0.8.0", DeprecationWarning)
        return self.num_nodes

    @num_gpu_nodes.setter
    def num_gpu_nodes(self, num_nodes):
        """Back compatibility, will be removed in v0.8.0"""
        warnings.warn("Attribute `num_gpu_nodes` has been renamed to `num_nodes` since v0.5.0"
                      " and this method will be removed in v0.8.0", DeprecationWarning)
        self.num_nodes = num_nodes

    @property
    def gradient_clip(self):
        """Back compatibility, will be removed in v0.8.0"""
        warnings.warn("Attribute `gradient_clip` has been renamed to `gradient_clip_val` since v0.5.0"
                      " and this method will be removed in v0.8.0", DeprecationWarning)
        return self.gradient_clip_val

    @gradient_clip.setter
    def gradient_clip(self, gradient_clip):
        """Back compatibility, will be removed in v0.8.0"""
        warnings.warn("Attribute `gradient_clip` has been renamed to `gradient_clip_val` since v0.5.0"
                      " and this method will be removed in v0.8.0", DeprecationWarning)
        self.gradient_clip_val = gradient_clip

    @property
    def max_nb_epochs(self):
        """Back compatibility, will be removed in v0.8.0"""
        warnings.warn("Attribute `max_nb_epochs` has been renamed to `max_epochs` since v0.5.0"
                      " and this method will be removed in v0.8.0", DeprecationWarning)
        return self.max_epochs

    @max_nb_epochs.setter
    def max_nb_epochs(self, max_epochs):
        """Back compatibility, will be removed in v0.8.0"""
        warnings.warn("Attribute `max_nb_epochs` has been renamed to `max_epochs` since v0.5.0"
                      " and this method will be removed in v0.8.0", DeprecationWarning)
        self.max_epochs = max_epochs

    @property
    def min_nb_epochs(self):
        """Back compatibility, will be removed in v0.8.0"""
        warnings.warn("Attribute `min_nb_epochs` has been renamed to `min_epochs` since v0.5.0"
                      " and this method will be removed in v0.8.0", DeprecationWarning)
        return self.min_epochs

    @min_nb_epochs.setter
    def min_nb_epochs(self, min_epochs):
        """Back compatibility, will be removed in v0.8.0"""
        warnings.warn("Attribute `min_nb_epochs` has been renamed to `min_epochs` since v0.5.0"
                      " and this method will be removed in v0.8.0", DeprecationWarning)
        self.min_epochs = min_epochs

    @property
    def nb_sanity_val_steps(self):
        """Back compatibility, will be removed in v0.8.0"""
        warnings.warn("Attribute `nb_sanity_val_steps` has been renamed to `num_sanity_val_steps` since v0.5.0"
                      " and this method will be removed in v0.8.0", DeprecationWarning)
        return self.num_sanity_val_steps

    @nb_sanity_val_steps.setter
    def nb_sanity_val_steps(self, nb):
        """Back compatibility, will be removed in v0.8.0"""
        warnings.warn("Attribute `nb_sanity_val_steps` has been renamed to `num_sanity_val_steps` since v0.5.0"
                      " and this method will be removed in v0.8.0", DeprecationWarning)
        self.num_sanity_val_steps = nb
Loading