Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 2 additions & 6 deletions ignite/contrib/engines/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,6 @@ def setup_common_training_handlers(
with_pbars: bool = True,
with_pbar_on_iters: bool = True,
log_every_iters: int = 100,
device: Optional[Union[str, torch.device]] = None,
stop_on_nan: bool = True,
clear_cuda_cache: bool = True,
save_handler: Optional[Union[Callable, BaseSaveHandler]] = None,
Expand Down Expand Up @@ -86,10 +85,7 @@ def setup_common_training_handlers(
class to use to store ``to_save``. See :class:`~ignite.handlers.checkpoint.Checkpoint` for more details.
Argument is mutually exclusive with ``output_path``.
**kwargs: optional keyword args to be passed to construct :class:`~ignite.handlers.checkpoint.Checkpoint`.
device (str of torch.device, optional): deprecated argument, it will be removed in v0.5.0.
"""
if device is not None:
warnings.warn("Argument device is unused and deprecated. It will be removed in v0.5.0")

_kwargs = dict(
to_save=to_save,
Expand Down Expand Up @@ -257,8 +253,8 @@ def empty_cuda_cache(_):

def setup_any_logging(logger, logger_module, trainer, optimizers, evaluators, log_every_iters):
raise DeprecationWarning(
"ignite.contrib.engines.common.setup_any_logging is deprecated since 0.4.0. "
"Please use ignite.contrib.engines.common._setup_logging instead."
"ignite.contrib.engines.common.setup_any_logging is deprecated since 0.4.0 and will be removed in 0.6.0. "
"Please use setup_tb_logging, setup_visdom_logging, setup_mlflow_logging, etc. instead."
)


Expand Down
24 changes: 5 additions & 19 deletions ignite/engine/engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -473,7 +473,7 @@ def state_dict_user_keys(self) -> List:
return self._state_dict_user_keys

def state_dict(self) -> OrderedDict:
"""Returns a dictionary containing engine's state: "seed", "epoch_length", "max_epochs" and "iteration" and
"""Returns a dictionary containing engine's state: "epoch_length", "max_epochs" and "iteration" and
other state values defined by `engine.state_dict_user_keys`

.. code-block:: python
Expand Down Expand Up @@ -506,8 +506,8 @@ def save_engine(_):
def load_state_dict(self, state_dict: Mapping) -> None:
"""Setups engine from `state_dict`.

State dictionary should contain keys: `iteration` or `epoch` and `max_epochs`, `epoch_length` and
`seed`. If `engine.state_dict_user_keys` contains keys, they should be also present in the state dictionary.
State dictionary should contain keys: `iteration` or `epoch`, `max_epochs` and `epoch_length`.
If `engine.state_dict_user_keys` contains keys, they should also be present in the state dictionary.
Iteration and epoch values are 0-based: the first iteration or epoch is zero.

This method does not remove any custom attributes added by the user.
Expand Down Expand Up @@ -595,18 +595,12 @@ def switch_dataloader():
self.state.dataloader = data
self._dataloader_iter = iter(self.state.dataloader)

def run(
self,
data: Iterable,
max_epochs: Optional[int] = None,
epoch_length: Optional[int] = None,
seed: Optional[int] = None,
) -> State:
def run(self, data: Iterable, max_epochs: Optional[int] = None, epoch_length: Optional[int] = None) -> State:
"""Runs the `process_function` over the passed data.

Engine has a state and the following logic is applied in this function:

- At the first call, new state is defined by `max_epochs`, `epoch_length`, `seed` if provided. A timer for
- At the first call, new state is defined by `max_epochs`, `epoch_length` if provided. A timer for
total and per-epoch time is initialized when Events.STARTED is handled.
- If state is already defined such that there are iterations to run until `max_epochs` and no input arguments
provided, state is kept and used in the function.
Expand All @@ -623,8 +617,6 @@ def run(
`len(data)`. If `data` is an iterator and `epoch_length` is not set, then it will be automatically
determined as the iteration on which data iterator raises `StopIteration`.
This argument should not change if run is resuming from a state.
seed (int, optional): Deprecated argument. Please, use `torch.manual_seed` or
:meth:`~ignite.utils.manual_seed`.

Returns:
State: output state.
Expand Down Expand Up @@ -653,12 +645,6 @@ def switch_batch(engine):
trainer.run(train_loader, max_epochs=2)

"""
if seed is not None:
warnings.warn(
"Argument seed is deprecated. It will be removed in 0.5.0. "
"Please, use torch.manual_seed or ignite.utils.manual_seed"
)

if not isinstance(data, Iterable):
raise TypeError("Argument data should be iterable")

Expand Down
11 changes: 0 additions & 11 deletions ignite/engine/events.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,17 +135,6 @@ def __or__(self, other: Any) -> "EventsList":
return EventsList() | self | other


class CallableEvents(CallableEventWithFilter):
# For backward compatibility
def __init__(self, *args: Any, **kwargs: Any) -> None:
super(CallableEvents, self).__init__(*args, **kwargs)
warnings.warn(
"Class ignite.engine.events.CallableEvents is deprecated. It will be removed in 0.5.0. "
"Please, use ignite.engine.EventEnum instead",
DeprecationWarning,
)


class EventEnum(CallableEventWithFilter, Enum): # type: ignore[misc]
"""Base class for all :class:`~ignite.engine.events.Events`. User defined custom events should also inherit
this class. For example, Custom events based on the loss calculation and backward pass can be created as follows:
Expand Down
31 changes: 0 additions & 31 deletions ignite/handlers/checkpoint.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,6 @@ class Checkpoint(Serializable):
Input of the function is ``(engine, event_name)``. Output of function should be an integer.
Default is None, global_step based on attached engine. If provided, uses function output as global_step.
To setup global step from another engine, please use :meth:`~ignite.handlers.global_step_from_engine`.
archived (bool, optional): Deprecated argument as models saved by ``torch.save`` are already compressed.
filename_pattern (str, optional): If ``filename_pattern`` is provided, this pattern will be used to render
checkpoint filenames. If the pattern is not defined, the default pattern would be used. See Note for
details.
Expand Down Expand Up @@ -246,7 +245,6 @@ def __init__(
score_name: Optional[str] = None,
n_saved: Optional[int] = 1,
global_step_transform: Callable = None,
archived: bool = False,
filename_pattern: Optional[str] = None,
include_self: bool = False,
):
Expand Down Expand Up @@ -279,8 +277,6 @@ def __init__(
raise TypeError(
"global_step_transform should be a function, got {} instead.".format(type(global_step_transform))
)
if archived:
warnings.warn("Argument archived is deprecated and will be removed in 0.5.0")

self.to_save = to_save
self.filename_prefix = filename_prefix
Expand Down Expand Up @@ -626,11 +622,6 @@ class ModelCheckpoint(Checkpoint):

Behaviour of this class has been changed since v0.3.0.

Argument ``save_as_state_dict`` is deprecated and should not be used. It is considered as True.

Argument ``save_interval`` is deprecated and should not be used. Please, use events filtering instead, e.g.
:attr:`~ignite.engine.events.Events.ITERATION_STARTED(every=1000)`

There is no more internal counter that has been used to indicate the number of save actions. User could
see its value `step_number` in the filename, e.g. `{filename_prefix}_{name}_{step_number}.pt`. Actually,
`step_number` is replaced by current engine's epoch if `score_function` is specified and current iteration
Expand Down Expand Up @@ -659,7 +650,6 @@ class ModelCheckpoint(Checkpoint):
Input of the function is `(engine, event_name)`. Output of function should be an integer.
Default is None, global_step based on attached engine. If provided, uses function output as global_step.
To setup global step from another engine, please use :meth:`~ignite.handlers.global_step_from_engine`.
archived (bool, optional): Deprecated argument as models saved by `torch.save` are already compressed.
include_self (bool): Whether to include the `state_dict` of this object in the checkpoint. If `True`, then
there must not be another object in ``to_save`` with key ``checkpointer``.
**kwargs: Accepted keyword arguments for `torch.save` or `xm.save` in `DiskSaver`.
Expand All @@ -684,37 +674,17 @@ def __init__(
self,
dirname: str,
filename_prefix: str,
save_interval: Optional[Callable] = None,
score_function: Optional[Callable] = None,
score_name: Optional[str] = None,
n_saved: Union[int, None] = 1,
atomic: bool = True,
require_empty: bool = True,
create_dir: bool = True,
save_as_state_dict: bool = True,
global_step_transform: Optional[Callable] = None,
archived: bool = False,
include_self: bool = False,
**kwargs
):

if not save_as_state_dict:
raise ValueError(
"Argument save_as_state_dict is deprecated and should be True."
"This argument will be removed in 0.5.0."
)
if save_interval is not None:
msg = (
"Argument save_interval is deprecated and should be None. This argument will be removed in 0.5.0."
"Please, use events filtering instead, e.g. Events.ITERATION_STARTED(every=1000)"
)
if save_interval == 1:
# Do not break for old version who used `save_interval=1`
warnings.warn(msg)
else:
# No choice
raise ValueError(msg)

disk_saver = DiskSaver(dirname, atomic=atomic, create_dir=create_dir, require_empty=require_empty, **kwargs)

super(ModelCheckpoint, self).__init__(
Expand All @@ -725,7 +695,6 @@ def __init__(
score_name=score_name,
n_saved=n_saved,
global_step_transform=global_step_transform,
archived=archived,
include_self=include_self,
)

Expand Down
3 changes: 0 additions & 3 deletions tests/ignite/contrib/engines/test_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -140,9 +140,6 @@ def test_asserts_setup_common_training_handlers():
train_sampler = MagicMock(spec=DistributedSampler)
setup_common_training_handlers(trainer, train_sampler=train_sampler)

with pytest.warns(UserWarning, match=r"Argument device is unused and deprecated"):
setup_common_training_handlers(trainer, device="cpu")


def test_no_warning_with_train_sampler(recwarn):
from torch.utils.data import RandomSampler
Expand Down
14 changes: 1 addition & 13 deletions tests/ignite/engine/test_custom_events.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,19 +6,7 @@

import ignite.distributed as idist
from ignite.engine import Engine, Events
from ignite.engine.events import CallableEvents, CallableEventWithFilter, EventEnum, EventsList


def test_deprecated_callable_events_class():
engine = Engine(lambda engine, batch: 0)

with pytest.warns(DeprecationWarning, match=r"Class ignite\.engine\.events\.CallableEvents is deprecated"):

class CustomEvents(CallableEvents, Enum):
TEST_EVENT = "test_event"

with pytest.raises(TypeError, match=r"Value at \d of event_names should be a str or EventEnum"):
engine.register_events(*CustomEvents)
from ignite.engine.events import CallableEventWithFilter, EventEnum, EventsList


def test_custom_events():
Expand Down
3 changes: 0 additions & 3 deletions tests/ignite/engine/test_engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -352,9 +352,6 @@ def test_run_asserts():
with pytest.raises(ValueError, match=r"Input data has zero size. Please provide non-empty data"):
engine.run([])

with pytest.warns(UserWarning, match="Argument seed is deprecated"):
engine.run([0, 1, 2, 3, 4], seed=1234)


def test_state_get_event_attrib_value():
state = State()
Expand Down
12 changes: 0 additions & 12 deletions tests/ignite/handlers/test_checkpoint.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,9 +62,6 @@ def test_checkpoint_wrong_input():
with pytest.raises(TypeError, match=r"global_step_transform should be a function."):
Checkpoint(to_save, lambda x: x, score_function=lambda e: 123, score_name="acc", global_step_transform=123)

with pytest.warns(UserWarning, match=r"Argument archived is deprecated"):
Checkpoint(to_save, lambda x: x, score_function=lambda e: 123, score_name="acc", archived=True)

with pytest.raises(ValueError, match=r"Cannot have key 'checkpointer' if `include_self` is True"):
Checkpoint({"checkpointer": model}, lambda x: x, include_self=True)

Expand Down Expand Up @@ -494,24 +491,15 @@ def test_model_checkpoint_args_validation(dirname):
with pytest.raises(ValueError, match=r"with extension '.pt' are already present "):
ModelCheckpoint(nonempty, _PREFIX)

with pytest.raises(ValueError, match=r"Argument save_interval is deprecated and should be None"):
ModelCheckpoint(existing, _PREFIX, save_interval=42)

with pytest.raises(ValueError, match=r"Directory path '\S+' is not found"):
ModelCheckpoint(os.path.join(dirname, "non_existing_dir"), _PREFIX, create_dir=False)

with pytest.raises(ValueError, match=r"Argument save_as_state_dict is deprecated and should be True"):
ModelCheckpoint(existing, _PREFIX, create_dir=False, save_as_state_dict=False)

with pytest.raises(ValueError, match=r"If `score_name` is provided, then `score_function` "):
ModelCheckpoint(existing, _PREFIX, create_dir=False, score_name="test")

with pytest.raises(TypeError, match=r"global_step_transform should be a function"):
ModelCheckpoint(existing, _PREFIX, create_dir=False, global_step_transform=1234)

with pytest.warns(UserWarning, match=r"Argument archived is deprecated"):
ModelCheckpoint(existing, _PREFIX, create_dir=False, archived=True)

h = ModelCheckpoint(dirname, _PREFIX, create_dir=False)
assert h.last_checkpoint is None
with pytest.raises(RuntimeError, match=r"No objects to checkpoint found."):
Expand Down