Integrate lightning_utilities==0.4.2 (#15817)
Co-authored-by: Jirka Borovec <6035284+Borda@users.noreply.github.com>
2 people authored and awaelchli committed Dec 15, 2022
1 parent 7fc4da9 commit 8aa96d0
Showing 31 changed files with 32 additions and 136 deletions.
2 changes: 1 addition & 1 deletion requirements/app/base.txt
@@ -7,7 +7,7 @@ fsspec>=2022.5.0, <=2022.7.1
 croniter>=1.3.0, <1.4.0 # strict; TODO: for now until we find something more robust.
 traitlets>=5.3.0, <=5.4.0
 arrow>=1.2.0, <1.2.4
-lightning-utilities>=0.3.0, !=0.4.0, <0.5.0
+lightning-utilities>=0.4.2, <0.5.0
 beautifulsoup4>=4.8.0, <4.11.2
 inquirer>=2.10.0
 psutil<5.9.4
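The new pin simply raises the floor to 0.4.2, which makes the previous explicit `!=0.4.0` exclusion redundant; the same change is applied to the fabric and pytorch requirements further down. A minimal sketch (not part of the diff) using the `packaging` library, with the specifier strings taken from the lines above:

from packaging.specifiers import SpecifierSet
from packaging.version import Version

old_spec = SpecifierSet(">=0.3.0,!=0.4.0,<0.5.0")  # previous pin
new_spec = SpecifierSet(">=0.4.2,<0.5.0")  # pin after this commit

assert Version("0.4.0") not in old_spec  # excluded explicitly before
assert Version("0.4.0") not in new_spec  # now excluded by the higher floor
assert Version("0.4.2") in new_spec  # the release this commit integrates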
6 changes: 4 additions & 2 deletions requirements/app/components.txt
@@ -1,3 +1,5 @@
 # deps required by components in the lightning app repository (src/lightning_app/components)
-lightning_api_access>=0.0.3
-aiohttp>=3.8.0, <=3.8.3
+lightning_api_access>=0.0.3 # serve
+aiohttp>=3.8.0, <=3.8.3 # auto_scaler
+lightning_lite # multinode
+pytorch_lightning # multinode
2 changes: 0 additions & 2 deletions requirements/app/devel.txt
@@ -12,5 +12,3 @@

 # extended list of dependencies for UI
 -r ./components.txt
-
--r ./examples.txt
1 change: 0 additions & 1 deletion requirements/app/examples.txt

This file was deleted.

2 changes: 1 addition & 1 deletion requirements/fabric/base.txt
@@ -6,4 +6,4 @@ torch>=1.10.0, <=1.13.0
 fsspec[http]>2021.06.0, <2022.6.0
 packaging>=17.0, <=21.3
 typing-extensions>=4.0.0, <=4.4.0
-lightning-utilities>=0.3.0, !=0.4.0, <0.5.0
+lightning-utilities>=0.4.2, <0.5.0
2 changes: 1 addition & 1 deletion requirements/pytorch/base.txt
@@ -10,4 +10,4 @@ tensorboardX>=2.2, <=2.5.1 # min version is set by torch.onnx missing attribute
 torchmetrics>=0.7.0, <0.10.1 # needed for using fixed compare_version
 packaging>=17.0, <=21.3
 typing-extensions>=4.0.0, <=4.4.0
-lightning-utilities>=0.3.0, !=0.4.0, <0.5.0
+lightning-utilities>=0.4.2, <0.5.0
2 changes: 1 addition & 1 deletion tests/tests_app/components/multi_node/test_base.py
@@ -2,7 +2,7 @@
 from unittest import mock

 import pytest
-from tests_app.helpers.utils import no_warning_call
+from lightning_utilities.test.warning import no_warning_call

 from lightning_app import CloudCompute, LightningWork
 from lightning_app.components import MultiNode
2 changes: 1 addition & 1 deletion tests/tests_app/components/multi_node/test_lite.py
@@ -5,7 +5,7 @@

 import pytest
 from lightning_utilities.core.imports import module_available
-from tests_app.helpers.utils import no_warning_call
+from lightning_utilities.test.warning import no_warning_call

 import lightning_fabric as lf
 from lightning_app.components.multi_node.lite import _LiteRunExecutor
2 changes: 1 addition & 1 deletion tests/tests_app/components/multi_node/test_trainer.py
@@ -5,7 +5,7 @@

 import pytest
 from lightning_utilities.core.imports import module_available
-from tests_app.helpers.utils import no_warning_call
+from lightning_utilities.test.warning import no_warning_call

 import pytorch_lightning as pl
 from lightning_app.components.multi_node.trainer import _LightningTrainerRunExecutor
30 changes: 0 additions & 30 deletions tests/tests_app/helpers/utils.py

This file was deleted.

8 changes: 1 addition & 7 deletions tests/tests_examples_app/public/test_multi_node.py
@@ -55,12 +55,6 @@ def on_before_run_once(self):
 @mock.patch("lightning_app.components.multi_node.base.is_running_in_cloud", return_value=True)
 def test_multi_node_examples(_, app_name, monkeypatch):
     monkeypatch.chdir(os.path.join(_PATH_EXAMPLES, "app_multi_node"))
-    command_line = [
-        app_name,
-        "--blocking",
-        "False",
-        "--open-ui",
-        "False",
-    ]
+    command_line = [app_name, "--blocking", "False", "--open-ui", "False", "--setup"]
     result = application_testing(LightningTestMultiNodeWorksApp, command_line)
     assert result.exit_code == 0
40 changes: 0 additions & 40 deletions tests/tests_fabric/helpers/utils.py

This file was deleted.

2 changes: 1 addition & 1 deletion tests/tests_fabric/plugins/environments/test_slurm.py
@@ -18,8 +18,8 @@
 from unittest import mock

 import pytest
+from lightning_utilities.test.warning import no_warning_call
 from tests_fabric.helpers.runif import RunIf
-from tests_fabric.helpers.utils import no_warning_call

 from lightning_fabric.plugins.environments import SLURMEnvironment
 from lightning_fabric.utilities.warnings import PossibleUserWarning
2 changes: 1 addition & 1 deletion tests/tests_fabric/test_fabric.py
@@ -20,8 +20,8 @@
 import torch
 import torch.distributed
 import torch.nn.functional
+from lightning_utilities.test.warning import no_warning_call
 from tests_fabric.helpers.runif import RunIf
-from tests_fabric.helpers.utils import no_warning_call
 from torch import nn
 from torch.utils.data import DataLoader, DistributedSampler, RandomSampler, Sampler, SequentialSampler, TensorDataset

2 changes: 1 addition & 1 deletion tests/tests_pytorch/callbacks/test_callbacks.py
@@ -16,11 +16,11 @@
 from unittest.mock import Mock

 import pytest
+from lightning_utilities.test.warning import no_warning_call

 from pytorch_lightning import Callback, Trainer
 from pytorch_lightning.callbacks import ModelCheckpoint
 from pytorch_lightning.demos.boring_classes import BoringModel
-from tests_pytorch.helpers.utils import no_warning_call


 def test_callbacks_configured_in_model(tmpdir):
2 changes: 1 addition & 1 deletion tests/tests_pytorch/core/test_metric_result_integration.py
@@ -20,6 +20,7 @@
 import pytest
 import torch
 import torchmetrics
+from lightning_utilities.test.warning import no_warning_call
 from torch.nn import ModuleDict, ModuleList
 from torchmetrics import Metric, MetricCollection

@@ -36,7 +37,6 @@
 )
 from tests_pytorch.core.test_results import spawn_launch
 from tests_pytorch.helpers.runif import RunIf
-from tests_pytorch.helpers.utils import no_warning_call


 class DummyMetric(Metric):
2 changes: 1 addition & 1 deletion tests/tests_pytorch/deprecated_api/__init__.py
@@ -14,7 +14,7 @@
 from contextlib import contextmanager
 from typing import Optional

-from tests_pytorch.helpers.utils import no_warning_call
+from lightning_utilities.test.warning import no_warning_call


 @contextmanager
4 changes: 1 addition & 3 deletions tests/tests_pytorch/deprecated_api/test_remove_1-10.py
@@ -17,6 +17,7 @@
 import numpy
 import pytest
 import torch
+from lightning_utilities.test.warning import no_warning_call
 from torch.utils.data import DataLoader

 from pytorch_lightning import Trainer
@@ -29,7 +30,6 @@
 from pytorch_lightning.overrides.fairscale import LightningShardedDataParallel, unwrap_lightning_module_sharded
 from pytorch_lightning.plugins.environments import LightningEnvironment
 from pytorch_lightning.strategies.bagua import LightningBaguaModule
-from pytorch_lightning.strategies.deepspeed import LightningDeepSpeedModule
 from pytorch_lightning.strategies.utils import on_colab_kaggle
 from pytorch_lightning.trainer.states import RunningStage, TrainerFn
 from pytorch_lightning.utilities.apply_func import (
@@ -66,7 +66,6 @@
 from pytorch_lightning.utilities.seed import pl_worker_init_function, reset_seed, seed_everything
 from pytorch_lightning.utilities.xla_device import inner_f, pl_multi_process, XLADeviceUtils
 from tests_pytorch.helpers.runif import RunIf
-from tests_pytorch.helpers.utils import no_warning_call


 def test_deprecated_amp_level():
@@ -80,7 +79,6 @@ def test_deprecated_amp_level():
         LightningParallelModule,
         LightningDistributedModule,
         LightningBaguaModule,
-        LightningDeepSpeedModule,
         pytest.param(LightningShardedDataParallel, marks=RunIf(fairscale=True)),
     ],
 )
@@ -5,14 +5,14 @@
 import pytest
 import torch
 from lightning_utilities.core.imports import package_available
+from lightning_utilities.test.warning import no_warning_call
 from packaging.version import Version

 from tests_pytorch.checkpointing.test_legacy_checkpoints import (
     CHECKPOINT_EXTENSION,
     LEGACY_BACK_COMPATIBLE_PL_VERSIONS,
     LEGACY_CHECKPOINTS_PATH,
 )
-from tests_pytorch.helpers.utils import no_warning_call


 @pytest.mark.parametrize("pl_version", LEGACY_BACK_COMPATIBLE_PL_VERSIONS)
27 changes: 0 additions & 27 deletions tests/tests_pytorch/helpers/utils.py
@@ -13,11 +13,6 @@
 # limitations under the License.
 import functools
 import os
-import re
-from contextlib import contextmanager
-from typing import Optional, Type
-
-import pytest

 from pytorch_lightning.callbacks import ModelCheckpoint
 from pytorch_lightning.demos.boring_classes import BoringModel
@@ -70,25 +65,3 @@ def init_checkpoint_callback(logger):

 def getattr_recursive(obj, attr):
     return functools.reduce(getattr, [obj] + attr.split("."))
-
-
-@contextmanager
-def no_warning_call(expected_warning: Type[Warning] = UserWarning, match: Optional[str] = None):
-    with pytest.warns(None) as record:
-        yield
-
-    if match is None:
-        try:
-            w = record.pop(expected_warning)
-        except AssertionError:
-            # no warning raised
-            return
-    else:
-        for w in record.list:
-            if w.category is expected_warning and re.compile(match).search(w.message.args[0]):
-                break
-        else:
-            return
-
-    msg = "A warning" if expected_warning is None else f"`{expected_warning.__name__}`"
-    raise AssertionError(f"{msg} was raised: {w}")
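The helper deleted above is replaced throughout this commit by lightning_utilities.test.warning.no_warning_call. A minimal usage sketch, assuming the upstream helper keeps the (expected_warning, match) signature seen in the deleted copy:

import warnings

from lightning_utilities.test.warning import no_warning_call


def quiet() -> None:
    pass  # emits no warning


def noisy() -> None:
    warnings.warn("resuming from a checkpoint that ended mid-epoch", UserWarning)


# Passes: the block raises no matching UserWarning.
with no_warning_call(UserWarning, match="resuming from a checkpoint"):
    quiet()

# Would fail with AssertionError, because a matching UserWarning is emitted:
# with no_warning_call(UserWarning, match="resuming from a checkpoint"):
#     noisy()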
2 changes: 1 addition & 1 deletion tests/tests_pytorch/loggers/test_wandb.py
@@ -16,14 +16,14 @@
 from unittest import mock

 import pytest
+from lightning_utilities.test.warning import no_warning_call

 import pytorch_lightning
 from pytorch_lightning import Trainer
 from pytorch_lightning.callbacks import ModelCheckpoint
 from pytorch_lightning.demos.boring_classes import BoringModel
 from pytorch_lightning.loggers import WandbLogger
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
-from tests_pytorch.helpers.utils import no_warning_call


 @mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock)
@@ -13,6 +13,7 @@
 # limitations under the License.
 import pytest
 import torch
+from lightning_utilities.test.warning import no_warning_call
 from torch.utils.data import DataLoader
 from torch.utils.data._utils.collate import default_collate

@@ -22,7 +23,6 @@
 from pytorch_lightning.loops.optimization.optimizer_loop import Closure
 from pytorch_lightning.trainer.states import RunningStage
 from tests_pytorch.helpers.deterministic_model import DeterministicModel
-from tests_pytorch.helpers.utils import no_warning_call


 def test__training_step__flow_scalar(tmpdir):
2 changes: 1 addition & 1 deletion tests/tests_pytorch/models/test_hparams.py
@@ -25,6 +25,7 @@
 import torch
 from fsspec.implementations.local import LocalFileSystem
 from lightning_utilities.core.imports import RequirementCache
+from lightning_utilities.test.warning import no_warning_call
 from torch.utils.data import DataLoader

 from pytorch_lightning import LightningModule, Trainer
@@ -37,7 +38,6 @@
 from pytorch_lightning.utilities import _OMEGACONF_AVAILABLE, AttributeDict, is_picklable
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from tests_pytorch.helpers.runif import RunIf
-from tests_pytorch.helpers.utils import no_warning_call

 if _OMEGACONF_AVAILABLE:
     from omegaconf import Container, OmegaConf
5 changes: 3 additions & 2 deletions tests/tests_pytorch/models/test_restore.py
@@ -23,6 +23,7 @@
 import pytest
 import torch
 import torch.nn.functional as F
+from lightning_utilities.test.warning import no_warning_call

 import tests_pytorch.helpers.pipelines as tpipes
 import tests_pytorch.helpers.utils as tutils
@@ -809,12 +810,12 @@ def test_restarting_mid_epoch_raises_warning(tmpdir, stop_in_the_middle, model_c
     trainer = Trainer(max_epochs=2, **trainer_kwargs)
     model.stop_batch_idx = -1

-    context_manager = pytest.warns if stop_in_the_middle else tutils.no_warning_call
+    context_manager = pytest.warns if stop_in_the_middle else no_warning_call
     with context_manager(UserWarning, match="resuming from a checkpoint that ended"):
         trainer.fit(model, ckpt_path=ckpt_path)

     if stop_in_the_middle:
         with mock.patch.dict(os.environ, {"PL_FAULT_TOLERANT_TRAINING": "1"}):
             trainer = Trainer(max_epochs=2, **trainer_kwargs)
-            with tutils.no_warning_call(UserWarning, match="resuming from a checkpoint that ended"):
+            with no_warning_call(UserWarning, match="resuming from a checkpoint that ended"):
                 trainer.fit(model, ckpt_path=ckpt_path)
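The hunk above keeps the pattern of picking the assertion context at runtime: pytest.warns when the warning is expected, no_warning_call when it must be absent. A self-contained sketch of that pattern under pytest; maybe_warn is an illustrative stand-in, not a function from the repository:

import warnings

import pytest
from lightning_utilities.test.warning import no_warning_call


def maybe_warn(should_warn: bool) -> None:
    # Stand-in for trainer.fit(...) resuming from a checkpoint.
    if should_warn:
        warnings.warn("resuming from a checkpoint that ended mid-epoch", UserWarning)


@pytest.mark.parametrize("should_warn", [True, False])
def test_warning_toggles(should_warn):
    context_manager = pytest.warns if should_warn else no_warning_call
    with context_manager(UserWarning, match="resuming from a checkpoint that ended"):
        maybe_warn(should_warn)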
2 changes: 1 addition & 1 deletion tests/tests_pytorch/test_cli.py
@@ -25,6 +25,7 @@
 import pytest
 import torch
 import yaml
+from lightning_utilities.test.warning import no_warning_call
 from torch.optim import SGD
 from torch.optim.lr_scheduler import ReduceLROnPlateau, StepLR

@@ -51,7 +52,6 @@
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _TORCHVISION_AVAILABLE
 from tests_pytorch.helpers.runif import RunIf
-from tests_pytorch.helpers.utils import no_warning_call

 if _JSONARGPARSE_SIGNATURES_AVAILABLE:
     from jsonargparse import lazy_instance, Namespace
@@ -18,6 +18,7 @@
 from unittest.mock import Mock

 import pytest
+from lightning_utilities.test.warning import no_warning_call
 from torch import Tensor
 from torch.utils.data import BatchSampler, DataLoader, DistributedSampler, Sampler, SequentialSampler

@@ -32,7 +33,6 @@
 from pytorch_lightning.utilities.data import _update_dataloader
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from tests_pytorch.helpers.runif import RunIf
-from tests_pytorch.helpers.utils import no_warning_call


 @RunIf(skip_windows=True)