[Enhancement] Replace warnings.warn with print_log #961

Merged 2 commits on Mar 6, 2023
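The change is mechanical across all files: every `warnings.warn(...)` becomes a `print_log(...)` call targeting the current logger at WARNING level. A minimal sketch of the before/after pattern (assuming an environment with mmengine installed):

```python
import logging
import warnings

from mmengine.logging import print_log

# Before: a Python warning. It bypasses MMLogger, so by default it is
# shown once per call site on stderr and never reaches the experiment
# log file.
warnings.warn('Please call `full_init()` method manually to accelerate '
              'the speed.')

# After: a WARNING-level record routed to the current MMLogger instance,
# so it carries the standard log format and lands in the log file.
print_log(
    'Please call `full_init()` method manually to accelerate the speed.',
    logger='current',
    level=logging.WARNING)
```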
28 changes: 18 additions & 10 deletions mmengine/dataset/base_dataset.py
@@ -2,15 +2,16 @@
 import copy
 import functools
 import gc
+import logging
 import os.path as osp
 import pickle
-import warnings
 from typing import Any, Callable, List, Optional, Sequence, Tuple, Union

 import numpy as np
 from torch.utils.data import Dataset

 from mmengine.fileio import list_from_file, load
+from mmengine.logging import print_log
 from mmengine.registry import TRANSFORMS
 from mmengine.utils import is_abs

@@ -100,11 +101,13 @@ def wrapper(obj: object, *args, **kwargs):
         # `_fully_initialized` is False, call `full_init` and set
         # `_fully_initialized` to True
         if not getattr(obj, '_fully_initialized', False):
-            warnings.warn('Attribute `_fully_initialized` is not defined in '
-                          f'{type(obj)} or `type(obj)._fully_initialized is '
-                          'False, `full_init` will be called and '
-                          f'{type(obj)}._fully_initialized will be set to '
-                          'True')
+            print_log(
+                f'Attribute `_fully_initialized` is not defined in '
+                f'{type(obj)} or `type(obj)._fully_initialized is '
+                'False, `full_init` will be called and '
+                f'{type(obj)}._fully_initialized will be set to True',
+                logger='current',
+                level=logging.WARNING)
             obj.full_init()  # type: ignore
             obj._fully_initialized = True  # type: ignore

@@ -392,9 +395,11 @@ def __getitem__(self, idx: int) -> dict:
         # to manually call `full_init` before dataset fed into dataloader to
         # ensure all workers use shared RAM from master process.
         if not self._fully_initialized:
-            warnings.warn(
+            print_log(
                 'Please call `full_init()` method manually to accelerate '
-                'the speed.')
+                'the speed.',
+                logger='current',
+                level=logging.WARNING)
             self.full_init()

         if self.test_mode:
@@ -498,8 +503,11 @@ def _load_metainfo(cls, metainfo: dict = None) -> dict:
                 try:
                     cls_metainfo[k] = list_from_file(v)
                 except (TypeError, FileNotFoundError):
-                    warnings.warn(f'{v} is not a meta file, simply parsed as '
-                                  'meta information')
+                    print_log(
+                        f'{v} is not a meta file, simply parsed as meta '
+                        'information',
+                        logger='current',
+                        level=logging.WARNING)
                     cls_metainfo[k] = v
             else:
                 cls_metainfo[k] = v
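All three hunks above guard `BaseDataset`'s lazy-initialization contract. A runnable sketch of the decorator case, showing when the first message fires (the `ToyLazy` class is a hypothetical stand-in, not part of mmengine):

```python
from mmengine.dataset import force_full_init


class ToyLazy:  # hypothetical class using the decorator edited above
    def __init__(self):
        self._fully_initialized = False

    def full_init(self):
        self._fully_initialized = True

    @force_full_init
    def get_item(self, idx):
        return idx


toy = ToyLazy()
toy.get_item(0)  # first call logs the WARNING, then runs full_init()
toy.get_item(0)  # already initialized: silent
```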
24 changes: 17 additions & 7 deletions mmengine/dataset/dataset_wrapper.py
@@ -1,13 +1,14 @@
 # Copyright (c) OpenMMLab. All rights reserved.
 import bisect
 import copy
+import logging
 import math
-import warnings
 from collections import defaultdict
 from typing import List, Sequence, Tuple, Union

 from torch.utils.data.dataset import ConcatDataset as _ConcatDataset

+from mmengine.logging import print_log
 from mmengine.registry import DATASETS
 from .base_dataset import BaseDataset, force_full_init

@@ -148,8 +149,11 @@ def __len__(self):

     def __getitem__(self, idx):
         if not self._fully_initialized:
-            warnings.warn('Please call `full_init` method manually to '
-                          'accelerate the speed.')
+            print_log(
+                'Please call `full_init` method manually to '
+                'accelerate the speed.',
+                logger='current',
+                level=logging.WARNING)
             self.full_init()
         dataset_idx, sample_idx = self._get_ori_dataset_idx(idx)
         return self.datasets[dataset_idx][sample_idx]
@@ -263,8 +267,11 @@ def get_data_info(self, idx: int) -> dict:

     def __getitem__(self, idx):
         if not self._fully_initialized:
-            warnings.warn('Please call `full_init` method manually to '
-                          'accelerate the speed.')
+            print_log(
+                'Please call `full_init` method manually to accelerate the '
+                'speed.',
+                logger='current',
+                level=logging.WARNING)
             self.full_init()

         sample_idx = self._get_ori_dataset_idx(idx)
@@ -470,9 +477,12 @@ def get_data_info(self, idx: int) -> dict:
         return self.dataset.get_data_info(sample_idx)

     def __getitem__(self, idx):
-        warnings.warn('Please call `full_init` method manually to '
-                      'accelerate the speed.')
         if not self._fully_initialized:
+            print_log(
+                'Please call `full_init` method manually to accelerate '
+                'the speed.',
+                logger='current',
+                level=logging.WARNING)
             self.full_init()

         ori_index = self._get_ori_dataset_idx(idx)
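The last hunk is more than a drop-in swap: previously the message fired on every `__getitem__` call of that wrapper, while the new code only emits it on the lazy path. A self-contained sketch of the corrected guard pattern (`LazyWrapper` is a toy stand-in for the wrappers above):

```python
import logging

from mmengine.logging import print_log


class LazyWrapper:  # toy stand-in, not an mmengine class
    def __init__(self, dataset):
        self.dataset = dataset
        self._fully_initialized = False

    def full_init(self):
        self._fully_initialized = True

    def __getitem__(self, idx):
        if not self._fully_initialized:
            # Warn only when lazy initialization actually happens,
            # not on every element access as before.
            print_log(
                'Please call `full_init` method manually to accelerate '
                'the speed.',
                logger='current',
                level=logging.WARNING)
            self.full_init()
        return self.dataset[idx]
```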
15 changes: 10 additions & 5 deletions mmengine/evaluator/metric.py
@@ -1,5 +1,5 @@
 # Copyright (c) OpenMMLab. All rights reserved.
-import warnings
+import logging
 from abc import ABCMeta, abstractmethod
 from typing import Any, List, Optional, Sequence, Union

@@ -44,8 +44,11 @@ def __init__(self,
         self.results: List[Any] = []
         self.prefix = prefix or self.default_prefix
         if self.prefix is None:
-            warnings.warn('The prefix is not set in metric class '
-                          f'{self.__class__.__name__}.')
+            print_log(
+                'The prefix is not set in metric class '
+                f'{self.__class__.__name__}.',
+                logger='current',
+                level=logging.WARNING)

     @property
     def dataset_meta(self) -> Optional[dict]:
@@ -97,10 +100,12 @@ def evaluate(self, size: int) -> dict:
             names of the metrics, and the values are corresponding results.
         """
         if len(self.results) == 0:
-            warnings.warn(
+            print_log(
                 f'{self.__class__.__name__} got empty `self.results`. Please '
                 'ensure that the processed results are properly added into '
-                '`self.results` in `process` method.')
+                '`self.results` in `process` method.',
+                logger='current',
+                level=logging.WARNING)

         results = collect_results(self.results, size, self.collect_device)

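A sketch of the first message in practice: a `BaseMetric` subclass (the `ToyAccuracy` class here is hypothetical) that neither sets `default_prefix` nor passes `prefix` now emits a WARNING log record instead of a Python warning:

```python
from mmengine.evaluator import BaseMetric


class ToyAccuracy(BaseMetric):  # hypothetical metric without default_prefix
    def process(self, data_batch, data_samples):
        self.results.append(1)

    def compute_metrics(self, results):
        return {'accuracy': sum(results) / len(results)}


metric = ToyAccuracy()  # prefix is None -> logs the WARNING above
```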
10 changes: 7 additions & 3 deletions mmengine/fileio/backends/base.py
@@ -1,7 +1,9 @@
 # Copyright (c) OpenMMLab. All rights reserved.
-import warnings
+import logging
 from abc import ABCMeta, abstractmethod

+from mmengine.logging import print_log
+

 class BaseStorageBackend(metaclass=ABCMeta):
     """Abstract class of storage backends.
@@ -19,8 +21,10 @@ class BaseStorageBackend(metaclass=ABCMeta):

     @property
     def allow_symlink(self):
-        warnings.warn('allow_symlink will be deprecated in future',
-                      DeprecationWarning)
+        print_log(
+            'allow_symlink will be deprecated in future',
+            logger='current',
+            level=logging.WARNING)
         return self._allow_symlink

     @property
14 changes: 9 additions & 5 deletions mmengine/fileio/file_client.py
@@ -1,10 +1,11 @@
 # Copyright (c) OpenMMLab. All rights reserved.
 import inspect
-import warnings
+import logging
 from contextlib import contextmanager
 from pathlib import Path
 from typing import Any, Generator, Iterator, Optional, Tuple, Union

+from mmengine.logging import print_log
 from mmengine.utils import is_filepath
 from .backends import (BaseStorageBackend, HTTPBackend, LmdbBackend,
                        LocalBackend, MemcachedBackend, PetrelBackend)
@@ -14,9 +15,11 @@ class HardDiskBackend(LocalBackend):
     """Raw hard disks storage backend."""

     def __init__(self) -> None:
-        warnings.warn(
+        print_log(
             '"HardDiskBackend" is the alias of "LocalBackend" '
-            'and the former will be deprecated in future.', DeprecationWarning)
+            'and the former will be deprecated in future.',
+            logger='current',
+            level=logging.WARNING)

     @property
     def name(self):
@@ -83,11 +86,12 @@ class FileClient:
     client: Any

     def __new__(cls, backend=None, prefix=None, **kwargs):
-        warnings.warn(
+        print_log(
             '"FileClient" will be deprecated in future. Please use io '
             'functions in '
             'https://mmengine.readthedocs.io/en/latest/api/fileio.html#file-io',  # noqa: E501
-            DeprecationWarning)
+            logger='current',
+            level=logging.WARNING)
         if backend is None and prefix is None:
             backend = 'disk'
         if backend is not None and backend not in cls._backends:
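One behavioral consequence worth noting: these two call sites previously emitted `DeprecationWarning`, which callers could silence or escalate with the `warnings` machinery; after this change the notice is an ordinary WARNING-level log record. A sketch of what that means downstream (assuming only that mmengine is installed):

```python
import warnings

from mmengine.fileio import FileClient

# This filter used to silence the deprecation notice; it no longer
# applies, because the message now goes through MMLogger rather than
# the warnings registry.
with warnings.catch_warnings():
    warnings.simplefilter('ignore', DeprecationWarning)
    client = FileClient(backend='disk')  # still logs a WARNING
```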
9 changes: 6 additions & 3 deletions mmengine/hooks/checkpoint_hook.py
@@ -1,12 +1,13 @@
 # Copyright (c) OpenMMLab. All rights reserved.
+import logging
 import os.path as osp
-import warnings
 from math import inf
 from pathlib import Path
 from typing import Callable, Dict, List, Optional, Sequence, Union

 from mmengine.dist import is_main_process
 from mmengine.fileio import FileClient, get_file_backend
+from mmengine.logging import print_log
 from mmengine.registry import HOOKS
 from mmengine.utils import is_list_of, is_seq_of
 from .hook import Hook
@@ -138,9 +139,11 @@ def __init__(self,
         self.args = kwargs

         if file_client_args is not None:
-            warnings.warn(
+            print_log(
                 '"file_client_args" will be deprecated in future. '
-                'Please use "backend_args" instead', DeprecationWarning)
+                'Please use "backend_args" instead',
+                logger='current',
+                level=logging.WARNING)
             if backend_args is not None:
                 raise ValueError(
                     '"file_client_args" and "backend_args" cannot be set '
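For users of `CheckpointHook` (and of `LoggerHook` below, which gets the same treatment), the migration the message asks for is a one-line change. A hedged sketch using the two keyword arguments visible in this diff:

```python
from mmengine.hooks import CheckpointHook

# Deprecated: still works, but now logs the WARNING above.
hook = CheckpointHook(interval=1, file_client_args=dict(backend='disk'))

# Preferred replacement:
hook = CheckpointHook(interval=1, backend_args=dict(backend='disk'))
```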
9 changes: 6 additions & 3 deletions mmengine/hooks/logger_hook.py
@@ -1,7 +1,7 @@
 # Copyright (c) OpenMMLab. All rights reserved.
+import logging
 import os
 import os.path as osp
-import warnings
 from collections import OrderedDict
 from pathlib import Path
 from typing import Dict, Optional, Sequence, Union
@@ -12,6 +12,7 @@
 from mmengine.fileio import FileClient, dump
 from mmengine.fileio.io import get_file_backend
 from mmengine.hooks import Hook
+from mmengine.logging import print_log
 from mmengine.registry import HOOKS
 from mmengine.utils import is_tuple_of, scandir

@@ -94,9 +95,11 @@ def __init__(self,
         self.out_dir = out_dir

         if file_client_args is not None:
-            warnings.warn(
+            print_log(
                 '"file_client_args" will be deprecated in future. '
-                'Please use "backend_args" instead', DeprecationWarning)
+                'Please use "backend_args" instead',
+                logger='current',
+                level=logging.WARNING)
             if backend_args is not None:
                 raise ValueError(
                     '"file_client_args" and "backend_args" cannot be set '
5 changes: 3 additions & 2 deletions mmengine/hooks/profiler_hook.py
@@ -1,14 +1,15 @@
 # Copyright (c) OpenMMLab. All rights reserved.
+import logging
 import os
 import os.path as osp
 import sys
-import warnings
 from typing import Callable, Optional, Union

 import torch

 from mmengine.dist import master_only
 from mmengine.hooks import Hook
+from mmengine.logging import print_log
 from mmengine.registry import HOOKS


@@ -18,7 +19,7 @@ def check_kineto() -> bool:  # noqa
         if torch.autograd.kineto_available():
             kineto_exist = True
     except AttributeError:
-        warnings.warn('NO KINETO')
+        print_log('NO KINETO', logger='current', level=logging.WARNING)
     return kineto_exist


9 changes: 6 additions & 3 deletions mmengine/model/averaged_model.py
@@ -1,5 +1,5 @@
 # Copyright (c) OpenMMLab. All rights reserved.
-import warnings
+import logging
 from abc import abstractmethod
 from copy import deepcopy
 from typing import Optional
@@ -8,6 +8,7 @@
 import torch.nn as nn
 from torch import Tensor

+from mmengine.logging import print_log
 from mmengine.registry import MODELS


@@ -184,11 +185,13 @@ def __init__(self,
         assert 0.0 < momentum < 1.0, 'momentum must be in range (0.0, 1.0)'\
                                      f'but got {momentum}'
         if momentum > 0.5:
-            warnings.warn(
+            print_log(
                 'The value of momentum in EMA is usually a small number,'
                 'which is different from the conventional notion of '
                 f'momentum but got {momentum}. Please make sure the '
-                f'value is correct.')
+                f'value is correct.',
+                logger='current',
+                level=logging.WARNING)
         self.momentum = momentum

     def avg_func(self, averaged_param: Tensor, source_param: Tensor,
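A sketch of when that EMA check now logs, using mmengine's `ExponentialMovingAverage`, which carries this `__init__` (a usage sketch, not part of the diff):

```python
import torch.nn as nn

from mmengine.model import ExponentialMovingAverage

model = nn.Linear(2, 2)
ema = ExponentialMovingAverage(model, momentum=0.7)        # logs the WARNING
ema_ok = ExponentialMovingAverage(model, momentum=0.0002)  # silent
```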
8 changes: 5 additions & 3 deletions mmengine/model/base_module.py
@@ -1,7 +1,6 @@
 # Copyright (c) OpenMMLab. All rights reserved.
 import copy
 import logging
-import warnings
 from abc import ABCMeta
 from collections import defaultdict
 from logging import FileHandler
@@ -139,8 +138,11 @@ def init_weights(self):
                 initialize(self, pretrained_cfg)
             self._is_init = True
         else:
-            warnings.warn(f'init_weights of {self.__class__.__name__} has '
-                          f'been called more than once.')
+            print_log(
+                f'init_weights of {self.__class__.__name__} has '
+                f'been called more than once.',
+                logger='current',
+                level=logging.WARNING)

         if is_top_level_module:
             # self._dump_init_info(logger_name)
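A sketch of the double-initialization case this message covers (`ToyModule` is a hypothetical minimal module):

```python
import torch.nn as nn

from mmengine.model import BaseModule


class ToyModule(BaseModule):  # hypothetical minimal module
    def __init__(self):
        super().__init__()
        self.fc = nn.Linear(2, 2)


m = ToyModule()
m.init_weights()
m.init_weights()  # second call logs the WARNING instead of warnings.warn
```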
9 changes: 6 additions & 3 deletions mmengine/optim/optimizer/default_constructor.py
@@ -1,5 +1,5 @@
 # Copyright (c) OpenMMLab. All rights reserved.
-import warnings
+import logging
 from typing import List, Optional, Union

 import torch
@@ -205,8 +205,11 @@ def add_params(self,
         for name, param in module.named_parameters(recurse=False):
             param_group = {'params': [param]}
             if bypass_duplicate and self._is_in(param_group, params):
-                warnings.warn(f'{prefix} is duplicate. It is skipped since '
-                              f'bypass_duplicate={bypass_duplicate}')
+                print_log(
+                    f'{prefix} is duplicate. It is skipped since '
+                    f'bypass_duplicate={bypass_duplicate}',
+                    logger='current',
+                    level=logging.WARNING)
                 continue
             if not param.requires_grad:
                 params.append(param_group)
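Since every call site now targets `logger='current'`, the messages are handled by whichever `MMLogger` is active. A minimal end-to-end check of that routing (a sketch, assuming only that mmengine is installed):

```python
import logging

from mmengine.logging import MMLogger, print_log

# Create (and make current) a logger, as a Runner would during a run.
logger = MMLogger.get_instance('mmengine', log_level='WARNING')

print_log('sample warning', logger='current', level=logging.WARNING)
# The record goes through MMLogger's handlers (console, and the log
# file when one is configured), not through the `warnings` registry.
```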