Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Deprecate callback hooks on_pretrain_routine_start and on_pretrain_routine_end #11794

Merged
merged 43 commits into from Feb 24, 2022
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
43 commits
Select commit Hold shift + click to select a range
32d7c2b
init commit
krishnakalyan3 Feb 7, 2022
5d33ba2
feedback based changes
krishnakalyan3 Feb 7, 2022
1142c92
adress PR comments
krishnakalyan3 Feb 9, 2022
d3d743e
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Feb 9, 2022
9014b11
trainer changes
krishnakalyan3 Feb 9, 2022
e6d28f5
Merge branch 'deprecate-pretrain_routine' of github.com:krishnakalyan…
krishnakalyan3 Feb 9, 2022
f0282db
update changelog
krishnakalyan3 Feb 9, 2022
efa6ef6
validations
krishnakalyan3 Feb 9, 2022
ad1751b
alternative hooks update
krishnakalyan3 Feb 9, 2022
c1c2eba
revert changes
krishnakalyan3 Feb 9, 2022
3e480b7
revert hooks
krishnakalyan3 Feb 9, 2022
f6c2f05
revert hook
krishnakalyan3 Feb 9, 2022
8779cc5
revert changes for hooks
krishnakalyan3 Feb 9, 2022
e7f3ad5
remove from logging
krishnakalyan3 Feb 9, 2022
ca74752
remove comments
krishnakalyan3 Feb 9, 2022
72c5f2a
init commit
krishnakalyan3 Feb 7, 2022
9d8ae14
feedback based changes
krishnakalyan3 Feb 7, 2022
53887ee
adress PR comments
krishnakalyan3 Feb 9, 2022
fe5efa2
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Feb 9, 2022
e2172fc
trainer changes
krishnakalyan3 Feb 9, 2022
71b14aa
rebase and commit
krishnakalyan3 Feb 10, 2022
437710b
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Feb 10, 2022
53535f8
rebased validator
krishnakalyan3 Feb 10, 2022
7e9451b
Merge branch 'deprecate-pretrain_routine' of github.com:krishnakalyan…
krishnakalyan3 Feb 10, 2022
e741632
rebase again
krishnakalyan3 Feb 10, 2022
0311369
fix ci error by importing optional
krishnakalyan3 Feb 15, 2022
d779a25
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Feb 15, 2022
47b3e82
remove bc breaking changes
krishnakalyan3 Feb 15, 2022
1be6952
Merge branch 'deprecate-pretrain_routine' of github.com:krishnakalyan…
krishnakalyan3 Feb 15, 2022
216ac95
changes according to suggestions
krishnakalyan3 Feb 15, 2022
1b2763a
update unit tests
krishnakalyan3 Feb 16, 2022
a163793
unit tests updated
krishnakalyan3 Feb 16, 2022
73e6c68
update test restore
krishnakalyan3 Feb 16, 2022
b7f5252
remove to fix unit test
krishnakalyan3 Feb 16, 2022
f843d87
Merge branch 'master' of github.com:krishnakalyan3/pytorch-lightning
krishnakalyan3 Feb 19, 2022
3c40f9c
update merge
krishnakalyan3 Feb 19, 2022
fe14dec
fix the deprecations
rohitgr7 Feb 24, 2022
86574e2
fix the deprecations
rohitgr7 Feb 24, 2022
88dc0ab
fix the deprecations
rohitgr7 Feb 24, 2022
92586bc
add deprecation test
rohitgr7 Feb 24, 2022
1175c8a
Merge branch 'master' into deprecate-pretrain_routine
rohitgr7 Feb 24, 2022
75da9fa
Apply suggestions from code review
rohitgr7 Feb 24, 2022
41f5489
Update pytorch_lightning/callbacks/base.py
rohitgr7 Feb 24, 2022
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
16 changes: 14 additions & 2 deletions pytorch_lightning/callbacks/base.py
Expand Up @@ -231,10 +231,22 @@ def on_train_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -
"""Called when the train ends."""

def on_pretrain_routine_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
"""Called when the pretrain routine begins."""
r"""
.. deprecated:: v1.6
This callback hook was deprecated in v1.6 and will be removed in v1.8. Use
``on_fit_start`` or ``setup`` instead.

Called when the pretrain routine begins.
"""

def on_pretrain_routine_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
"""Called when the pretrain routine ends."""
r"""
.. deprecated:: v1.6
This callback hook was deprecated in v1.6 and will be removed in v1.8. Use
``on_fit_start`` or ``setup`` instead.

Called when the pretrain routine ends.
"""

def on_validation_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
"""Called when the validation loop begins."""
Expand Down
2 changes: 0 additions & 2 deletions pytorch_lightning/callbacks/lambda_function.py
Expand Up @@ -68,8 +68,6 @@ def __init__(
on_batch_end: Optional[Callable] = None,
on_train_start: Optional[Callable] = None,
on_train_end: Optional[Callable] = None,
on_pretrain_routine_start: Optional[Callable] = None,
on_pretrain_routine_end: Optional[Callable] = None,
krishnakalyan3 marked this conversation as resolved.
Show resolved Hide resolved
on_validation_start: Optional[Callable] = None,
on_validation_end: Optional[Callable] = None,
on_test_start: Optional[Callable] = None,
Expand Down
11 changes: 5 additions & 6 deletions pytorch_lightning/callbacks/model_checkpoint.py
Expand Up @@ -248,19 +248,18 @@ def state_key(self) -> str:
)

def setup(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", stage: Optional[str] = None) -> None:
# When pretrain routine starts we build the ckpt dir on the fly.
krishnakalyan3 marked this conversation as resolved.
Show resolved Hide resolved
self.__resolve_ckpt_dir(trainer)
if trainer.is_global_zero:
self.__warn_if_dir_not_empty(self.dirpath)

# NOTE: setting these attributes needs to happen as early as possible BEFORE reloading callback states,
# because the attributes are part of the state_key which needs to be fully defined before reloading.
if self._save_on_train_epoch_end is None:
# if the user runs validation multiple times per training epoch or multiple training epochs without
# validation, then we run after validation instead of on train epoch end
self._save_on_train_epoch_end = trainer.val_check_interval == 1.0 and trainer.check_val_every_n_epoch == 1

def on_pretrain_routine_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
"""When pretrain routine starts we build the ckpt dir on the fly."""
self.__resolve_ckpt_dir(trainer)
if trainer.is_global_zero:
self.__warn_if_dir_not_empty(self.dirpath)

def on_train_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
self._last_time_checked = time.monotonic()

Expand Down
2 changes: 1 addition & 1 deletion pytorch_lightning/callbacks/model_summary.py
Expand Up @@ -49,7 +49,7 @@ class ModelSummary(Callback):
def __init__(self, max_depth: int = 1) -> None:
self._max_depth: int = max_depth

def on_pretrain_routine_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
def setup(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
carmocca marked this conversation as resolved.
Show resolved Hide resolved
if not self._max_depth:
return None

Expand Down
14 changes: 6 additions & 8 deletions pytorch_lightning/core/hooks.py
Expand Up @@ -64,19 +64,17 @@ def on_predict_end(self) -> None:
def on_pretrain_routine_start(self) -> None:
"""Called at the beginning of the pretrain routine (between fit and train start).

- fit
krishnakalyan3 marked this conversation as resolved.
Show resolved Hide resolved
- pretrain_routine start
krishnakalyan3 marked this conversation as resolved.
Show resolved Hide resolved
- pretrain_routine end
- training_start
.. deprecated:: v1.6
        :meth:`on_pretrain_routine_start` is deprecated and will be removed in v1.8.0.
krishnakalyan3 marked this conversation as resolved.
Show resolved Hide resolved
Please use :meth:`on_fit_start` or :meth:`setup` directly.
"""

def on_pretrain_routine_end(self) -> None:
"""Called at the end of the pretrain routine (between fit and train start).

- fit
- pretrain_routine start
- pretrain_routine end
- training_start
.. deprecated:: v1.6
        :meth:`on_pretrain_routine_end` is deprecated and will be removed in v1.8.0.
krishnakalyan3 marked this conversation as resolved.
Show resolved Hide resolved
Please use :meth:`on_fit_start` or :meth:`setup` directly.
"""

def on_train_batch_start(self, batch: Any, batch_idx: int, unused: int = 0) -> Optional[int]:
Expand Down
9 changes: 0 additions & 9 deletions pytorch_lightning/trainer/trainer.py
Expand Up @@ -1281,15 +1281,6 @@ def _pre_training_routine(self):
# register signals
self._signal_connector.register_signal_handlers()

# --------------------------
# Pre-train
# --------------------------
self._call_callback_hooks("on_pretrain_routine_start")
self._call_lightning_module_hook("on_pretrain_routine_start")

self._call_callback_hooks("on_pretrain_routine_end")
self._call_lightning_module_hook("on_pretrain_routine_end")

krishnakalyan3 marked this conversation as resolved.
Show resolved Hide resolved
def _run_train(self) -> None:
self._pre_training_routine()

Expand Down