
Commit

update tests
move tests to ignite.handlers along with their fixtures, updating the
imports accordingly

add a test ensuring deprecation warnings are raised for ignite.contrib.handlers
leej3 committed Mar 21, 2024
1 parent a206c15 commit b2c0ab2
Showing 18 changed files with 230 additions and 169 deletions.
72 changes: 0 additions & 72 deletions tests/ignite/contrib/conftest.py
@@ -1,79 +1,7 @@
import random
from pathlib import Path

import pytest


@pytest.fixture
def no_site_packages(request):
    import sys

    modules = {}
    for k in sys.modules:
        if request.param in k:
            modules[k] = sys.modules[k]
    for k in modules:
        del sys.modules[k]

    prev_path = list(sys.path)
    sys.path = [p for p in sys.path if "site-packages" not in p]
    yield "no_site_packages"
    sys.path = prev_path
    for k in modules:
        sys.modules[k] = modules[k]


@pytest.fixture()
def visdom_offline_logfile(dirname):
    log_file = dirname / "logs.visdom"
    yield log_file


vd_hostname = None
vd_port = None
vd_server_process = None


@pytest.fixture()
def visdom_server():
    # Start Visdom server once and stop it with visdom_server_stop
    global vd_hostname, vd_port, vd_server_process

    if vd_server_process is None:
        import subprocess
        import time

        from visdom import Visdom
        from visdom.server.build import download_scripts

        (Path.home() / ".visdom").mkdir(exist_ok=True)
        download_scripts()

        vd_hostname = "localhost"
        vd_port = random.randint(8089, 8887)

        try:
            vis = Visdom(server=vd_hostname, port=vd_port, raise_exceptions=True)
        except ConnectionError:
            pass

        vd_server_process = subprocess.Popen(
            ["python", "-m", "visdom.server", "--hostname", vd_hostname, "-port", str(vd_port)]
        )
        time.sleep(5)

        vis = Visdom(server=vd_hostname, port=vd_port)
        assert vis.check_connection()
        vis.close()

    yield (vd_hostname, vd_port)


@pytest.fixture()
def visdom_server_stop():
    yield None

    import time

    vd_server_process.kill()
    time.sleep(2)
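
These fixtures move to tests/ignite/handlers/conftest.py below. For reference, a hypothetical test using the indirectly parametrized no_site_packages fixture to simulate a missing dependency might look like the following sketch ("visdom" and the test name are assumptions, not part of the commit):

# Hypothetical usage sketch of the no_site_packages fixture above.
import pytest


@pytest.mark.parametrize("no_site_packages", ["visdom"], indirect=True)
def test_without_visdom_installed(no_site_packages):
    # site-packages was stripped from sys.path, so the import should fail.
    with pytest.raises(ImportError):
        import visdom  # noqa: F401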
4 changes: 2 additions & 2 deletions tests/ignite/contrib/engines/test_common.py
@@ -62,11 +62,11 @@ def _test_setup_common_training_handlers(
    if lr_scheduler is None:
        lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=step_size, gamma=gamma)
    elif isinstance(lr_scheduler, str) and lr_scheduler == "ignite|LRScheduler":
-        from ignite.contrib.handlers import LRScheduler
+        from ignite.handlers import LRScheduler

        lr_scheduler = LRScheduler(torch.optim.lr_scheduler.StepLR(optimizer, step_size=step_size, gamma=gamma))
    elif isinstance(lr_scheduler, str) and lr_scheduler == "ignite":
-        from ignite.contrib.handlers import PiecewiseLinear
+        from ignite.handlers import PiecewiseLinear

        milestones_values = [(0, 0.0), (step_size, lr), (num_iters * (num_epochs - 1), 0.0)]
        lr_scheduler = PiecewiseLinear(optimizer, param_name="lr", milestones_values=milestones_values)
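
For context, a minimal sketch of the PiecewiseLinear scheduler used above, imported from its new location (the milestone values here are illustrative assumptions):

# Minimal sketch: PiecewiseLinear linearly interpolates "lr" between milestone pairs.
import torch

from ignite.handlers import PiecewiseLinear

model = torch.nn.Linear(2, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.0)

# lr ramps from 0.0 to 0.1 over the first 3 events, then decays back to 0.0 by event 9.
scheduler = PiecewiseLinear(optimizer, param_name="lr", milestones_values=[(0, 0.0), (3, 0.1), (9, 0.0)])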
14 changes: 0 additions & 14 deletions tests/ignite/contrib/handlers/__init__.py

This file was deleted.

48 changes: 0 additions & 48 deletions tests/ignite/contrib/handlers/conftest.py

This file was deleted.

98 changes: 98 additions & 0 deletions tests/ignite/contrib/handlers/test_warnings_of_deprecation.py
@@ -0,0 +1,98 @@
from importlib import __import__

import pytest


@pytest.mark.parametrize(
    "log_module,fromlist",
    [
        ("mlflow_logger", ["MLflowLogger", "OptimizerParamsHandler", "OutputHandler"]),
        ("polyaxon_logger", ["PolyaxonLogger", "OutputHandler", "OptimizerParamsHandler"]),
        ("wandb_logger", ["WandBLogger", "OutputHandler", "OptimizerParamsHandler"]),
        ("lr_finder", ["FastaiLRFinder"]),
        ("tqdm_logger", ["ProgressBar"]),
        (
            "clearml_logger",
            [
                "ClearMLLogger",
                "ClearMLSaver",
                "OptimizerParamsHandler",
                "OutputHandler",
                "WeightsScalarHandler",
                "WeightsHistHandler",
                "GradsScalarHandler",
                "GradsHistHandler",
            ],
        ),
        (
            "tensorboard_logger",
            [
                "TensorboardLogger",
                "OptimizerParamsHandler",
                "OutputHandler",
                "WeightsScalarHandler",
                "WeightsHistHandler",
                "GradsScalarHandler",
                "GradsHistHandler",
            ],
        ),
        (
            "visdom_logger",
            [
                "VisdomLogger",
                "OptimizerParamsHandler",
                "OutputHandler",
                "WeightsScalarHandler",
                "GradsScalarHandler",
            ],
        ),
        (
            "neptune_logger",
            [
                "NeptuneLogger",
                "NeptuneSaver",
                "OptimizerParamsHandler",
                "OutputHandler",
                "WeightsScalarHandler",
                "GradsScalarHandler",
            ],
        ),
        (
            "base_logger",
            [
                "BaseHandler",
                "BaseWeightsHandler",
                "BaseOptimizerParamsHandler",
                "BaseOutputHandler",
                "BaseWeightsScalarHandler",
                "BaseLogger",
            ],
        ),
        (
            "time_profilers",
            [
                "BasicTimeProfiler",
                "HandlersTimeProfiler",
            ],
        ),
        (
            "param_scheduler",
            [
                "ConcatScheduler",
                "CosineAnnealingScheduler",
                "LinearCyclicalScheduler",
                "LRScheduler",
                "ParamGroupScheduler",
                "ParamScheduler",
                "PiecewiseLinear",
                "CyclicalScheduler",
                "create_lr_scheduler_with_warmup",
            ],
        ),
    ],
)
def test_imports(log_module, fromlist):
    with pytest.warns(DeprecationWarning, match="will be removed in version 0.6.0"):
        imported = __import__(f"ignite.contrib.handlers.{log_module}", globals(), locals(), fromlist)
        for attr in fromlist:
            getattr(imported, attr)
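
For context, the kind of deprecation shim this test exercises might look like the following sketch of ignite/contrib/handlers/tqdm_logger.py (the actual module contents may differ; only the warning text is taken from the test above):

# Hypothetical deprecation shim; re-exports the moved implementation and warns on import.
import warnings

from ignite.handlers.tqdm_logger import ProgressBar  # noqa: F401

warnings.warn(
    "ignite.contrib.handlers.tqdm_logger is deprecated and will be removed in version 0.6.0; "
    "please use ignite.handlers.tqdm_logger instead.",
    DeprecationWarning,
)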
14 changes: 14 additions & 0 deletions tests/ignite/handlers/__init__.py
@@ -1 +1,15 @@
# Needed to collect coverage data
class MockFP16DeepSpeedZeroOptimizer:
    def __init__(self, optimizer):
        self.optimizer = optimizer

    def step(self, closure=None):
        self.optimizer.step()

    def _get_param_groups(self):
        return self.optimizer.param_groups

    def _set_param_groups(self, value):
        self.optimizer.param_groups = value

    param_groups = property(_get_param_groups, _set_param_groups)
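
As a usage illustration (hypothetical, not part of the commit), the mock stands in for DeepSpeed's FP16 ZeRO optimizer wrapper so handler tests can exercise the unwrapping code path without DeepSpeed installed:

# Hypothetical usage sketch; assumes the tests package is importable from the repo root.
import torch

from tests.ignite.handlers import MockFP16DeepSpeedZeroOptimizer

model = torch.nn.Linear(2, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
wrapped = MockFP16DeepSpeedZeroOptimizer(optimizer)

# The wrapper proxies param_groups and step() to the underlying optimizer.
assert wrapped.param_groups is optimizer.param_groups
wrapped.step()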
88 changes: 87 additions & 1 deletion tests/ignite/handlers/conftest.py
@@ -1,6 +1,89 @@
import random
from pathlib import Path
from unittest.mock import Mock

import pytest
import torch

vd_hostname = None
vd_port = None
vd_server_process = None


@pytest.fixture()
def visdom_server():
    # Start Visdom server once and stop it with visdom_server_stop
    global vd_hostname, vd_port, vd_server_process

    if vd_server_process is None:
        import subprocess
        import time

        from visdom import Visdom
        from visdom.server.build import download_scripts

        (Path.home() / ".visdom").mkdir(exist_ok=True)
        download_scripts()

        vd_hostname = "localhost"
        vd_port = random.randint(8089, 8887)

        try:
            vis = Visdom(server=vd_hostname, port=vd_port, raise_exceptions=True)
        except ConnectionError:
            pass

        vd_server_process = subprocess.Popen(
            ["python", "-m", "visdom.server", "--hostname", vd_hostname, "-port", str(vd_port)]
        )
        time.sleep(5)

        vis = Visdom(server=vd_hostname, port=vd_port)
        assert vis.check_connection()
        vis.close()

    yield (vd_hostname, vd_port)


@pytest.fixture()
def visdom_server_stop():
    yield None

    import time

    vd_server_process.kill()
    time.sleep(2)


@pytest.fixture
def no_site_packages(request):
    import sys

    modules = {}
    for k in sys.modules:
        if request.param in k:
            modules[k] = sys.modules[k]
    for k in modules:
        del sys.modules[k]

    prev_path = list(sys.path)
    sys.path = [p for p in sys.path if "site-packages" not in p]
    yield "no_site_packages"
    sys.path = prev_path
    for k in modules:
        sys.modules[k] = modules[k]


@pytest.fixture()
def norm_mock():
    def norm(x: torch.Tensor):
        return x.norm()

    norm_mock = Mock(side_effect=norm, spec=norm)
    norm_mock.configure_mock(**{"__name__": "norm"})
    norm_mock.reset_mock()
    return norm_mock


@pytest.fixture()
def dummy_model_factory():
@@ -14,7 +97,7 @@ def __init__(self):
            self.fc2.weight.data.fill_(1.0)
            self.fc2.bias.data.fill_(1.0)

-    def get_dummy_model(with_grads=True, with_frozen_layer=False):
+    def get_dummy_model(with_grads=True, with_frozen_layer=False, with_buffer=False):
        model = DummyModel()
        if with_grads:
            model.fc2.weight.grad = torch.zeros_like(model.fc2.weight)
@@ -27,6 +110,9 @@ def get_dummy_model(with_grads=True, with_frozen_layer=False):
        if with_frozen_layer:
            for param in model.fc1.parameters():
                param.requires_grad = False

+        if with_buffer:
+            model.register_buffer("buffer1", torch.ones(1))
        return model

    return get_dummy_model
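
A hypothetical test illustrating the new with_buffer flag (not part of the commit):

# The factory returned by the fixture accepts the new with_buffer argument.
def test_dummy_model_has_buffer(dummy_model_factory):
    model = dummy_model_factory(with_buffer=True)
    # register_buffer("buffer1", ...) makes the tensor appear in named_buffers().
    assert "buffer1" in dict(model.named_buffers())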
