From a41f0a0a121d05140155367f3ebd59966aecf3c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cteerasak=E2=80=9D?= Date: Fri, 23 Oct 2020 20:38:16 +0700 Subject: [PATCH 1/2] Remove all deprecated args, kwargs for v0.5.0 (#1396) --- ignite/contrib/engines/common.py | 4 --- ignite/engine/engine.py | 24 ++++------------ ignite/engine/events.py | 11 -------- ignite/handlers/checkpoint.py | 31 --------------------- tests/ignite/contrib/engines/test_common.py | 3 -- tests/ignite/engine/test_custom_events.py | 14 +--------- tests/ignite/engine/test_engine.py | 3 -- tests/ignite/handlers/test_checkpoint.py | 12 -------- 8 files changed, 6 insertions(+), 96 deletions(-) diff --git a/ignite/contrib/engines/common.py b/ignite/contrib/engines/common.py index ed7169fd5b3c..27d708952f07 100644 --- a/ignite/contrib/engines/common.py +++ b/ignite/contrib/engines/common.py @@ -43,7 +43,6 @@ def setup_common_training_handlers( with_pbars: bool = True, with_pbar_on_iters: bool = True, log_every_iters: int = 100, - device: Optional[Union[str, torch.device]] = None, stop_on_nan: bool = True, clear_cuda_cache: bool = True, save_handler: Optional[Union[Callable, BaseSaveHandler]] = None, @@ -86,10 +85,7 @@ def setup_common_training_handlers( class to use to store ``to_save``. See :class:`~ignite.handlers.checkpoint.Checkpoint` for more details. Argument is mutually exclusive with ``output_path``. **kwargs: optional keyword args to be passed to construct :class:`~ignite.handlers.checkpoint.Checkpoint`. - device (str of torch.device, optional): deprecated argument, it will be removed in v0.5.0. """ - if device is not None: - warnings.warn("Argument device is unused and deprecated. 
It will be removed in v0.5.0") _kwargs = dict( to_save=to_save, diff --git a/ignite/engine/engine.py b/ignite/engine/engine.py index 3e28f8761e01..d14114523c64 100644 --- a/ignite/engine/engine.py +++ b/ignite/engine/engine.py @@ -473,7 +473,7 @@ def state_dict_user_keys(self) -> List: return self._state_dict_user_keys def state_dict(self) -> OrderedDict: - """Returns a dictionary containing engine's state: "seed", "epoch_length", "max_epochs" and "iteration" and + """Returns a dictionary containing engine's state: "epoch_length", "max_epochs" and "iteration" and other state values defined by `engine.state_dict_user_keys` .. code-block:: python @@ -506,8 +506,8 @@ def save_engine(_): def load_state_dict(self, state_dict: Mapping) -> None: """Setups engine from `state_dict`. - State dictionary should contain keys: `iteration` or `epoch` and `max_epochs`, `epoch_length` and - `seed`. If `engine.state_dict_user_keys` contains keys, they should be also present in the state dictionary. + State dictionary should contain keys: `iteration` or `epoch`, `max_epochs` and `epoch_length`. + If `engine.state_dict_user_keys` contains keys, they should be also present in the state dictionary. Iteration and epoch values are 0-based: the first iteration or epoch is zero. This method does not remove any custom attributs added by user. @@ -595,18 +595,12 @@ def switch_dataloader(): self.state.dataloader = data self._dataloader_iter = iter(self.state.dataloader) - def run( - self, - data: Iterable, - max_epochs: Optional[int] = None, - epoch_length: Optional[int] = None, - seed: Optional[int] = None, - ) -> State: + def run(self, data: Iterable, max_epochs: Optional[int] = None, epoch_length: Optional[int] = None,) -> State: """Runs the `process_function` over the passed data. Engine has a state and the following logic is applied in this function: - - At the first call, new state is defined by `max_epochs`, `epoch_length`, `seed` if provided. 
A timer for + - At the first call, new state is defined by `max_epochs`, `epoch_length` if provided. A timer for total and per-epoch time is initialized when Events.STARTED is handled. - If state is already defined such that there are iterations to run until `max_epochs` and no input arguments provided, state is kept and used in the function. @@ -623,8 +617,6 @@ def run( `len(data)`. If `data` is an iterator and `epoch_length` is not set, then it will be automatically determined as the iteration on which data iterator raises `StopIteration`. This argument should not change if run is resuming from a state. - seed (int, optional): Deprecated argument. Please, use `torch.manual_seed` or - :meth:`~ignite.utils.manual_seed`. Returns: State: output state. @@ -653,12 +645,6 @@ def switch_batch(engine): trainer.run(train_loader, max_epochs=2) """ - if seed is not None: - warnings.warn( - "Argument seed is deprecated. It will be removed in 0.5.0. " - "Please, use torch.manual_seed or ignite.utils.manual_seed" - ) - if not isinstance(data, Iterable): raise TypeError("Argument data should be iterable") diff --git a/ignite/engine/events.py b/ignite/engine/events.py index 6ef19b7b4246..abb0e8e64043 100644 --- a/ignite/engine/events.py +++ b/ignite/engine/events.py @@ -135,17 +135,6 @@ def __or__(self, other: Any) -> "EventsList": return EventsList() | self | other -class CallableEvents(CallableEventWithFilter): - # For backward compatibility - def __init__(self, *args: Any, **kwargs: Any) -> None: - super(CallableEvents, self).__init__(*args, **kwargs) - warnings.warn( - "Class ignite.engine.events.CallableEvents is deprecated. It will be removed in 0.5.0. " - "Please, use ignite.engine.EventEnum instead", - DeprecationWarning, - ) - - class EventEnum(CallableEventWithFilter, Enum): # type: ignore[misc] """Base class for all :class:`~ignite.engine.events.Events`. User defined custom events should also inherit this class. 
For example, Custom events based on the loss calculation and backward pass can be created as follows: diff --git a/ignite/handlers/checkpoint.py b/ignite/handlers/checkpoint.py index a1e4b441f6bc..255a9775951b 100644 --- a/ignite/handlers/checkpoint.py +++ b/ignite/handlers/checkpoint.py @@ -93,7 +93,6 @@ class Checkpoint(Serializable): Input of the function is ``(engine, event_name)``. Output of function should be an integer. Default is None, global_step based on attached engine. If provided, uses function output as global_step. To setup global step from another engine, please use :meth:`~ignite.handlers.global_step_from_engine`. - archived (bool, optional): Deprecated argument as models saved by ``torch.save`` are already compressed. filename_pattern (str, optional): If ``filename_pattern`` is provided, this pattern will be used to render checkpoint filenames. If the pattern is not defined, the default pattern would be used. See Note for details. @@ -246,7 +245,6 @@ def __init__( score_name: Optional[str] = None, n_saved: Optional[int] = 1, global_step_transform: Callable = None, - archived: bool = False, filename_pattern: Optional[str] = None, include_self: bool = False, ): @@ -279,8 +277,6 @@ def __init__( raise TypeError( "global_step_transform should be a function, got {} instead.".format(type(global_step_transform)) ) - if archived: - warnings.warn("Argument archived is deprecated and will be removed in 0.5.0") self.to_save = to_save self.filename_prefix = filename_prefix @@ -626,11 +622,6 @@ class ModelCheckpoint(Checkpoint): Behaviour of this class has been changed since v0.3.0. - Argument ``save_as_state_dict`` is deprecated and should not be used. It is considered as True. - - Argument ``save_interval`` is deprecated and should not be used. Please, use events filtering instead, e.g. - :attr:`~ignite.engine.events.Events.ITERATION_STARTED(every=1000)` - There is no more internal counter that has been used to indicate the number of save actions. 
User could see its value `step_number` in the filename, e.g. `{filename_prefix}_{name}_{step_number}.pt`. Actually, `step_number` is replaced by current engine's epoch if `score_function` is specified and current iteration @@ -659,7 +650,6 @@ class ModelCheckpoint(Checkpoint): Input of the function is `(engine, event_name)`. Output of function should be an integer. Default is None, global_step based on attached engine. If provided, uses function output as global_step. To setup global step from another engine, please use :meth:`~ignite.handlers.global_step_from_engine`. - archived (bool, optional): Deprecated argument as models saved by `torch.save` are already compressed. include_self (bool): Whether to include the `state_dict` of this object in the checkpoint. If `True`, then there must not be another object in ``to_save`` with key ``checkpointer``. **kwargs: Accepted keyword arguments for `torch.save` or `xm.save` in `DiskSaver`. @@ -684,37 +674,17 @@ def __init__( self, dirname: str, filename_prefix: str, - save_interval: Optional[Callable] = None, score_function: Optional[Callable] = None, score_name: Optional[str] = None, n_saved: Union[int, None] = 1, atomic: bool = True, require_empty: bool = True, create_dir: bool = True, - save_as_state_dict: bool = True, global_step_transform: Optional[Callable] = None, - archived: bool = False, include_self: bool = False, **kwargs ): - if not save_as_state_dict: - raise ValueError( - "Argument save_as_state_dict is deprecated and should be True." - "This argument will be removed in 0.5.0." - ) - if save_interval is not None: - msg = ( - "Argument save_interval is deprecated and should be None. This argument will be removed in 0.5.0." - "Please, use events filtering instead, e.g. 
Events.ITERATION_STARTED(every=1000)" - ) - if save_interval == 1: - # Do not break for old version who used `save_interval=1` - warnings.warn(msg) - else: - # No choice - raise ValueError(msg) - disk_saver = DiskSaver(dirname, atomic=atomic, create_dir=create_dir, require_empty=require_empty, **kwargs) super(ModelCheckpoint, self).__init__( @@ -725,7 +695,6 @@ def __init__( score_name=score_name, n_saved=n_saved, global_step_transform=global_step_transform, - archived=archived, include_self=include_self, ) diff --git a/tests/ignite/contrib/engines/test_common.py b/tests/ignite/contrib/engines/test_common.py index be9972b87572..e74c21c40629 100644 --- a/tests/ignite/contrib/engines/test_common.py +++ b/tests/ignite/contrib/engines/test_common.py @@ -140,9 +140,6 @@ def test_asserts_setup_common_training_handlers(): train_sampler = MagicMock(spec=DistributedSampler) setup_common_training_handlers(trainer, train_sampler=train_sampler) - with pytest.warns(UserWarning, match=r"Argument device is unused and deprecated"): - setup_common_training_handlers(trainer, device="cpu") - def test_no_warning_with_train_sampler(recwarn): from torch.utils.data import RandomSampler diff --git a/tests/ignite/engine/test_custom_events.py b/tests/ignite/engine/test_custom_events.py index 1bf0bbc04b6d..afeff7de8853 100644 --- a/tests/ignite/engine/test_custom_events.py +++ b/tests/ignite/engine/test_custom_events.py @@ -6,19 +6,7 @@ import ignite.distributed as idist from ignite.engine import Engine, Events -from ignite.engine.events import CallableEvents, CallableEventWithFilter, EventEnum, EventsList - - -def test_deprecated_callable_events_class(): - engine = Engine(lambda engine, batch: 0) - - with pytest.warns(DeprecationWarning, match=r"Class ignite\.engine\.events\.CallableEvents is deprecated"): - - class CustomEvents(CallableEvents, Enum): - TEST_EVENT = "test_event" - - with pytest.raises(TypeError, match=r"Value at \d of event_names should be a str or EventEnum"): - 
engine.register_events(*CustomEvents) +from ignite.engine.events import CallableEventWithFilter, EventEnum, EventsList def test_custom_events(): diff --git a/tests/ignite/engine/test_engine.py b/tests/ignite/engine/test_engine.py index 89505d265cb1..43701b4b1f4a 100644 --- a/tests/ignite/engine/test_engine.py +++ b/tests/ignite/engine/test_engine.py @@ -352,9 +352,6 @@ def test_run_asserts(): with pytest.raises(ValueError, match=r"Input data has zero size. Please provide non-empty data"): engine.run([]) - with pytest.warns(UserWarning, match="Argument seed is deprecated"): - engine.run([0, 1, 2, 3, 4], seed=1234) - def test_state_get_event_attrib_value(): state = State() diff --git a/tests/ignite/handlers/test_checkpoint.py b/tests/ignite/handlers/test_checkpoint.py index 11d044e2aa37..9dddeab184ab 100644 --- a/tests/ignite/handlers/test_checkpoint.py +++ b/tests/ignite/handlers/test_checkpoint.py @@ -62,9 +62,6 @@ def test_checkpoint_wrong_input(): with pytest.raises(TypeError, match=r"global_step_transform should be a function."): Checkpoint(to_save, lambda x: x, score_function=lambda e: 123, score_name="acc", global_step_transform=123) - with pytest.warns(UserWarning, match=r"Argument archived is deprecated"): - Checkpoint(to_save, lambda x: x, score_function=lambda e: 123, score_name="acc", archived=True) - with pytest.raises(ValueError, match=r"Cannot have key 'checkpointer' if `include_self` is True"): Checkpoint({"checkpointer": model}, lambda x: x, include_self=True) @@ -494,24 +491,15 @@ def test_model_checkpoint_args_validation(dirname): with pytest.raises(ValueError, match=r"with extension '.pt' are already present "): ModelCheckpoint(nonempty, _PREFIX) - with pytest.raises(ValueError, match=r"Argument save_interval is deprecated and should be None"): - ModelCheckpoint(existing, _PREFIX, save_interval=42) - with pytest.raises(ValueError, match=r"Directory path '\S+' is not found"): ModelCheckpoint(os.path.join(dirname, "non_existing_dir"), _PREFIX, 
create_dir=False)

-    with pytest.raises(ValueError, match=r"Argument save_as_state_dict is deprecated and should be True"):
-        ModelCheckpoint(existing, _PREFIX, create_dir=False, save_as_state_dict=False)
-
     with pytest.raises(ValueError, match=r"If `score_name` is provided, then `score_function` "):
         ModelCheckpoint(existing, _PREFIX, create_dir=False, score_name="test")

     with pytest.raises(TypeError, match=r"global_step_transform should be a function"):
         ModelCheckpoint(existing, _PREFIX, create_dir=False, global_step_transform=1234)

-    with pytest.warns(UserWarning, match=r"Argument archived is deprecated"):
-        ModelCheckpoint(existing, _PREFIX, create_dir=False, archived=True)
-
     h = ModelCheckpoint(dirname, _PREFIX, create_dir=False)
     assert h.last_checkpoint is None
     with pytest.raises(RuntimeError, match=r"No objects to checkpoint found."):

From d2da93d2ff0aab139218e578dee1d0dc8c6481db Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E2=80=9Cteerasak=E2=80=9D?=
Date: Fri, 23 Oct 2020 21:40:57 +0700
Subject: [PATCH 2/2] Improve deprecation message of setup_any_logging (#1396)

---
 ignite/contrib/engines/common.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/ignite/contrib/engines/common.py b/ignite/contrib/engines/common.py
index 27d708952f07..44409765c4fa 100644
--- a/ignite/contrib/engines/common.py
+++ b/ignite/contrib/engines/common.py
@@ -253,8 +253,8 @@ def empty_cuda_cache(_):

 def setup_any_logging(logger, logger_module, trainer, optimizers, evaluators, log_every_iters):
     raise DeprecationWarning(
-        "ignite.contrib.engines.common.setup_any_logging is deprecated since 0.4.0. "
-        "Please use ignite.contrib.engines.common._setup_logging instead."
+        "ignite.contrib.engines.common.setup_any_logging is deprecated since 0.4.0 and will be removed in 0.6.0. "
+        "Please use instead: setup_tb_logging, setup_visdom_logging or setup_mlflow_logging etc."
     )