[PY] change hardcoded names of loggers to __name__ #11089

Open · wants to merge 3 commits into base: main
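The change repeated throughout the diff is the standard logging idiom; a minimal sketch (the example logger name is only illustrative):

    import logging

    # Before: a hardcoded logger name, detached from the package hierarchy.
    logger = logging.getLogger("autotvm")

    # After: __name__ expands to the module's dotted path (e.g. "tvm.autotvm.record"
    # when the module is imported), so every logger hangs off the "tvm" root and
    # can be configured per package or per module.
    logger = logging.getLogger(__name__)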
2 changes: 1 addition & 1 deletion apps/topi_recipe/conv/test_conv_int8_arm.py
@@ -24,7 +24,7 @@
 from tvm import topi

 logging.basicConfig(stream=sys.stdout, level=logging.INFO)
-LOGGER = logging.getLogger("test_conv_int8_intel")
+LOGGER = logging.getLogger(__name__)
 LOGGER.disabled = False

 # All the WORKLOADS from Resnet except first layer
2 changes: 1 addition & 1 deletion apps/topi_recipe/conv/test_conv_int8_intel.py
@@ -24,7 +24,7 @@
 from tvm import topi

 logging.basicConfig(stream=sys.stdout, level=logging.INFO)
-LOGGER = logging.getLogger("test_conv_int8_intel")
+LOGGER = logging.getLogger(__name__)
 LOGGER.disabled = False

 # All the WORKLOADS from Resnet except first layer
4 changes: 2 additions & 2 deletions docs/arch/relay_op_strategy.rst
@@ -278,5 +278,5 @@ model to learn which implementation is used for each operator.

 .. code:: python

-logging.getLogger("te_compiler").setLevel(logging.INFO)
-logging.getLogger("te_compiler").addHandler(logging.StreamHandler(sys.stdout))
+logging.getLogger("tvm.relay.backend.te_compiler").setLevel(logging.INFO)
+logging.getLogger("tvm.relay.backend.te_compiler").addHandler(logging.StreamHandler(sys.stdout))

Member:

I'm not an expert in our doc rendering, but it looks like __name__ doesn't work in this case? If so, this change looks good to me.

Contributor Author:

Me too. I think this is an example script in which the user configures logging, so I just updated the old logger name to the new one.
1 change: 1 addition & 0 deletions docs/contribute/code_guide.rst
@@ -88,6 +88,7 @@ Python Code Styles
 - The functions and classes are documented in `numpydoc <https://numpydoc.readthedocs.io/en/latest/>`_ format.
 - Check your code style using ``python tests/scripts/ci.py lint``
 - Stick to language features in ``python 3.7``
+- If you use the ``logging`` module, create loggers from the module name, i.e. ``logging.getLogger(__name__)``, rather than hardcoding a name such as ``logging.getLogger("tvm.runtime")``


 Writing Python Tests
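A short sketch of why module-derived names are preferred: logger names are dotted and hierarchical, so one setting on a parent logger covers every child beneath it.

    import logging

    # With loggers created via logging.getLogger(__name__), everything sits
    # under the "tvm" namespace, so one call controls the whole library...
    logging.getLogger("tvm").setLevel(logging.DEBUG)

    # ...while a single subsystem can still be tuned independently.
    logging.getLogger("tvm.autotvm").setLevel(logging.WARNING)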
4 changes: 2 additions & 2 deletions gallery/how_to/tune_with_autotvm/tune_conv2d_cuda.py
@@ -182,8 +182,8 @@ def conv2d_no_batching(N, H, W, CO, CI, KH, KW, stride, padding):
 # for this template

 # logging config (for printing tuning log to screen)
-logging.getLogger("autotvm").setLevel(logging.DEBUG)
-logging.getLogger("autotvm").addHandler(logging.StreamHandler(sys.stdout))
+logging.getLogger("tvm.autotvm").setLevel(logging.DEBUG)
+logging.getLogger("tvm.autotvm").addHandler(logging.StreamHandler(sys.stdout))

 # the last layer in resnet
 N, H, W, CO, CI, KH, KW, strides, padding = 1, 7, 7, 512, 512, 3, 3, (1, 1), (1, 1)
2 changes: 1 addition & 1 deletion gallery/how_to/tune_with_autotvm/tune_relay_arm.py
@@ -411,6 +411,6 @@ def tune_and_evaluate(tuning_opt):
 # .. code-block:: python
 #
 # import logging
-# logging.getLogger('autotvm').setLevel(logging.DEBUG)
+# logging.getLogger('tvm.autotvm').setLevel(logging.DEBUG)
 #
 # Finally, always feel free to ask our community for help on https://discuss.tvm.apache.org
2 changes: 1 addition & 1 deletion gallery/how_to/tune_with_autotvm/tune_relay_cuda.py
@@ -302,7 +302,7 @@ def tune_and_evaluate(tuning_opt):
 # .. code-block:: python
 #
 # import logging
-# logging.getLogger('autotvm').setLevel(logging.DEBUG)
+# logging.getLogger('tvm.autotvm').setLevel(logging.DEBUG)
 #
 # Finally, always feel free to ask our community for help on https://discuss.tvm.apache.org

2 changes: 1 addition & 1 deletion gallery/how_to/tune_with_autotvm/tune_relay_mobile_gpu.py
@@ -409,6 +409,6 @@ def tune_and_evaluate(tuning_opt):
 # .. code-block:: python
 #
 # import logging
-# logging.getLogger('autotvm').setLevel(logging.DEBUG)
+# logging.getLogger('tvm.autotvm').setLevel(logging.DEBUG)
 #
 # Finally, always feel free to ask our community for help on https://discuss.tvm.apache.org
4 changes: 2 additions & 2 deletions gallery/tutorial/autotvm_matmul_x86.py
@@ -321,8 +321,8 @@ def matmul(N, L, M, dtype):
 # configuration discovered by the tuner later.

 # logging config (for printing tuning log to the screen)
-logging.getLogger("autotvm").setLevel(logging.DEBUG)
-logging.getLogger("autotvm").addHandler(logging.StreamHandler(sys.stdout))
+logging.getLogger("tvm.autotvm").setLevel(logging.DEBUG)
+logging.getLogger("tvm.autotvm").addHandler(logging.StreamHandler(sys.stdout))

 ################################################################################
 # There are two steps for measuring a config: build and run. By default, we use
2 changes: 1 addition & 1 deletion python/tvm/auto_scheduler/cost_model/xgb_model.py
@@ -30,7 +30,7 @@

 xgb = None

-logger = logging.getLogger("auto_scheduler")
+logger = logging.getLogger(__name__)


 class XGBDMatrixContext:
2 changes: 1 addition & 1 deletion python/tvm/auto_scheduler/dispatcher.py
@@ -37,7 +37,7 @@
 from .search_task import SearchTask, TuningOptions
 from .utils import calc_workload_dis_factor, decode_workload_key

-logger = logging.getLogger("auto_scheduler")
+logger = logging.getLogger(__name__)


 class DispatchContext(object):
2 changes: 1 addition & 1 deletion python/tvm/auto_scheduler/measure.py
@@ -64,7 +64,7 @@
 )

 # pylint: disable=invalid-name
-logger = logging.getLogger("auto_scheduler")
+logger = logging.getLogger(__name__)

 # The time cost for measurements with errors
 # We use 1e10 instead of sys.float_info.max for better readability in log
2 changes: 1 addition & 1 deletion python/tvm/auto_scheduler/measure_record.py
@@ -30,7 +30,7 @@
 from .utils import calc_workload_dis_factor, decode_workload_key
 from . import _ffi_api

-logger = logging.getLogger("auto_scheduler")
+logger = logging.getLogger(__name__)


 @tvm._ffi.register_object("auto_scheduler.RecordToFile")
2 changes: 1 addition & 1 deletion python/tvm/auto_scheduler/relay_integration.py
@@ -45,7 +45,7 @@
 from .utils import get_const_tuple
 from .workload_registry import register_workload_tensors

-logger = logging.getLogger("auto_scheduler")
+logger = logging.getLogger(__name__)


 def call_all_topi_funcs(mod, params, target, error_list, opt_level=3):
2 changes: 1 addition & 1 deletion python/tvm/auto_scheduler/search_task.py
@@ -38,7 +38,7 @@
 from . import _ffi_api

 # pylint: disable=invalid-name
-logger = logging.getLogger("auto_scheduler")
+logger = logging.getLogger(__name__)


 @tvm._ffi.register_object("auto_scheduler.HardwareParams")
2 changes: 1 addition & 1 deletion python/tvm/auto_scheduler/task_scheduler.py
@@ -36,7 +36,7 @@
 from .measure_record import RecordReader
 from . import _ffi_api

-logger = logging.getLogger("auto_scheduler")
+logger = logging.getLogger(__name__)


 def make_search_policies(
2 changes: 1 addition & 1 deletion python/tvm/auto_scheduler/workload_registry.py
@@ -39,7 +39,7 @@

 from .utils import deserialize_args, get_func_name, serialize_args

-logger = logging.getLogger("auto_scheduler")
+logger = logging.getLogger(__name__)

 # Global workload function and hash key registry
 # It stores two types of workload:
2 changes: 1 addition & 1 deletion python/tvm/autotvm/graph_tuner/base_graph_tuner.py
@@ -137,7 +137,7 @@ def __init__(

 # Set up logger
 self._verbose = verbose
-self._logger = logging.getLogger(name + "_logger")
+self._logger = logging.getLogger(__name__ + "." + name)
 need_file_handler = need_console_handler = True
 for handler in self._logger.handlers:
 if handler.__class__.__name__ == "FileHandler":
2 changes: 1 addition & 1 deletion python/tvm/autotvm/measure/measure_methods.py
@@ -51,7 +51,7 @@
 from ..utils import get_const_tuple
 from .measure import Builder, MeasureErrorNo, MeasureResult, Runner

-logger = logging.getLogger("autotvm")
+logger = logging.getLogger(__name__)


 class BuildResult(namedtuple("BuildResult", ("filename", "arg_info", "error", "time_cost"))):
2 changes: 1 addition & 1 deletion python/tvm/autotvm/record.py
@@ -39,7 +39,7 @@

 AUTOTVM_LOG_VERSION = 0.2
 _old_version_warning = True
-logger = logging.getLogger("autotvm")
+logger = logging.getLogger(__name__)

 try: # convert unicode to str for python2
 _unicode = unicode
2 changes: 1 addition & 1 deletion python/tvm/autotvm/task/dispatcher.py
@@ -37,7 +37,7 @@
 from .space import FallbackConfigEntity
 from .. import env as _env

-logger = logging.getLogger("autotvm")
+logger = logging.getLogger(__name__)


 class DispatchContext(object):
2 changes: 1 addition & 1 deletion python/tvm/autotvm/task/relay_integration.py
@@ -30,7 +30,7 @@
 from .task import create
 from .topi_integration import TaskExtractEnv

-logger = logging.getLogger("autotvm")
+logger = logging.getLogger(__name__)


 # TODO(moreau89) find a more elegant way to lower for VTAs
2 changes: 1 addition & 1 deletion python/tvm/autotvm/tophub.py
@@ -59,7 +59,7 @@
 "amd_apu": "v0.01",
 }

-logger = logging.getLogger("autotvm")
+logger = logging.getLogger(__name__)


 def _alias(name):
2 changes: 1 addition & 1 deletion python/tvm/autotvm/tuner/callback.py
@@ -25,7 +25,7 @@
 from .. import record
 from ..utils import format_si_prefix

-logger = logging.getLogger("autotvm")
+logger = logging.getLogger(__name__)


 def log_to_file(file_out, protocol="json"):
2 changes: 1 addition & 1 deletion python/tvm/autotvm/tuner/sa_model_optimizer.py
@@ -28,7 +28,7 @@
 from ..utils import sample_ints
 from .model_based_tuner import ModelOptimizer, knob2point, point2knob

-logger = logging.getLogger("autotvm")
+logger = logging.getLogger(__name__)


 class SimulatedAnnealingOptimizer(ModelOptimizer):
2 changes: 1 addition & 1 deletion python/tvm/autotvm/tuner/tuner.py
@@ -26,7 +26,7 @@

 from ..env import GLOBAL_SCOPE

-logger = logging.getLogger("autotvm")
+logger = logging.getLogger(__name__)


 class Tuner(object):
2 changes: 1 addition & 1 deletion python/tvm/autotvm/tuner/xgboost_cost_model.py
@@ -31,7 +31,7 @@

 xgb = None

-logger = logging.getLogger("autotvm")
+logger = logging.getLogger(__name__)


 class XGBoostCostModel(CostModel):
2 changes: 1 addition & 1 deletion python/tvm/autotvm/utils.py
@@ -26,7 +26,7 @@
 from tvm.tir import expr
 from tvm.contrib.popen_pool import PopenPoolExecutor

-logger = logging.getLogger("autotvm")
+logger = logging.getLogger(__name__)


 class EmptyContext(object):
2 changes: 1 addition & 1 deletion python/tvm/contrib/cutlass/build.py
@@ -26,7 +26,7 @@
 from .gen_conv2d import CutlassConv2DProfiler
 from .library import ConvKind

-logger = logging.getLogger("cutlass")
+logger = logging.getLogger(__name__)


 def _get_cutlass_path():
2 changes: 1 addition & 1 deletion python/tvm/contrib/cutlass/gen_tensor_op.py
@@ -30,7 +30,7 @@
 EpilogueFunctor,
 )

-logger = logging.getLogger("cutlass")
+logger = logging.getLogger(__name__)


 dtype_map = {
2 changes: 1 addition & 1 deletion python/tvm/contrib/download.py
@@ -23,7 +23,7 @@
 import tempfile
 import time

-LOG = logging.getLogger("download")
+LOG = logging.getLogger(__name__)


 def download(url, path, overwrite=False, size_compare=False, retries=3):
2 changes: 1 addition & 1 deletion python/tvm/driver/tvmc/autotuner.py
@@ -43,7 +43,7 @@


 # pylint: disable=invalid-name
-logger = logging.getLogger("TVMC")
+logger = logging.getLogger(__name__)


 @register_parser
2 changes: 1 addition & 1 deletion python/tvm/driver/tvmc/compiler.py
@@ -39,7 +39,7 @@
 from .shape_parser import parse_shape_string

 # pylint: disable=invalid-name
-logger = logging.getLogger("TVMC")
+logger = logging.getLogger(__name__)


 @register_parser
2 changes: 1 addition & 1 deletion python/tvm/driver/tvmc/composite_target.py
@@ -35,7 +35,7 @@


 # pylint: disable=invalid-name
-logger = logging.getLogger("TVMC")
+logger = logging.getLogger(__name__)


 # Global dictionary to map targets
2 changes: 1 addition & 1 deletion python/tvm/driver/tvmc/frontends.py
@@ -36,7 +36,7 @@


 # pylint: disable=invalid-name
-logger = logging.getLogger("TVMC")
+logger = logging.getLogger(__name__)


 class Frontend(ABC):
2 changes: 1 addition & 1 deletion python/tvm/driver/tvmc/main.py
@@ -88,7 +88,7 @@ def _main(argv):
 if args.verbose > 4:
 args.verbose = 4

-logging.getLogger("TVMC").setLevel(40 - args.verbose * 10)
+logging.getLogger("tvm.driver.tvmc").setLevel(40 - args.verbose * 10)

 if args.version:
 sys.stdout.write("%s\n" % tvm.__version__)
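For reference, the 40 - args.verbose * 10 expression maps the -v count onto the standard logging levels; a quick sketch of the resulting table:

    import logging

    # verbose=0 -> 40 ERROR, 1 -> 30 WARNING, 2 -> 20 INFO,
    # 3 -> 10 DEBUG, 4 -> 0 NOTSET (verbose is clamped to 4 above)
    for verbose in range(5):
        print(verbose, logging.getLevelName(40 - verbose * 10))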
2 changes: 1 addition & 1 deletion python/tvm/driver/tvmc/runner.py
@@ -56,7 +56,7 @@
 SUPPORT_MICRO = False

 # pylint: disable=invalid-name
-logger = logging.getLogger("TVMC")
+logger = logging.getLogger(__name__)


 @register_parser
2 changes: 1 addition & 1 deletion python/tvm/driver/tvmc/target.py
@@ -32,7 +32,7 @@
 from tvm.target import Target, TargetKind

 # pylint: disable=invalid-name
-logger = logging.getLogger("TVMC")
+logger = logging.getLogger(__name__)

 # We can't tell the type inside an Array but all current options are strings so
 # it can default to that. Bool is used alongside Integer but aren't distinguished
2 changes: 1 addition & 1 deletion python/tvm/driver/tvmc/tracker.py
@@ -21,7 +21,7 @@
 from urllib.parse import urlparse

 # pylint: disable=invalid-name
-logger = logging.getLogger("TVMC")
+logger = logging.getLogger(__name__)


 def tracker_host_port_from_cli(rpc_tracker_str):
2 changes: 1 addition & 1 deletion python/tvm/relay/backend/contrib/ethosu/vela_api.py
@@ -32,7 +32,7 @@
 from tvm.relay.backend.contrib.ethosu import tir_to_cs_translator as tirtocs

 # pylint: disable=invalid-name
-logger = logging.getLogger("Ethos-U")
+logger = logging.getLogger(__name__)

 VELA_TO_NP_DTYPES = {
 vapi.NpuDataType.UINT8: np.uint8,
4 changes: 2 additions & 2 deletions python/tvm/relay/backend/te_compiler.py
@@ -30,8 +30,8 @@
 from .. import ty as _ty
 from . import _backend

-logger = logging.getLogger("te_compiler")
-autotvm_logger = logging.getLogger("autotvm")
+logger = logging.getLogger(__name__)
+autotvm_logger = logging.getLogger("tvm.autotvm")

 _first_warning = True

2 changes: 1 addition & 1 deletion python/tvm/relay/frontend/common.py
@@ -44,7 +44,7 @@ def filter(self, record):


 # pylint: disable=invalid-name
-logger = logging.getLogger("Frontend")
+logger = logging.getLogger(__name__)
 logger.addFilter(DuplicateFilter())
 # Uncomment below line to print all debug msgs
 # logger.setLevel(logging.DEBUG)
2 changes: 1 addition & 1 deletion python/tvm/relay/op/contrib/dnnl.py
@@ -44,7 +44,7 @@
 from ...dataflow_pattern import wildcard, is_op
 from .register import register_pattern_table

-logger = logging.getLogger("DNNL")
+logger = logging.getLogger(__name__)


 def _register_external_op_helper(op_name, supported=True):
2 changes: 1 addition & 1 deletion python/tvm/relay/op/contrib/tensorrt.py
@@ -29,7 +29,7 @@
 from tvm.relay.expr_functor import ExprMutator, ExprVisitor
 from tvm.relay.op.contrib.register import register_pattern_table

-logger = logging.getLogger("TensorRT")
+logger = logging.getLogger(__name__)
 supported_types = ["float32", "float16"]
