From 0004b1f39c465b5ce8f088d671e994a29a43b496 Mon Sep 17 00:00:00 2001
From: Clark Kang
Date: Mon, 6 Oct 2025 10:20:50 -0700
Subject: [PATCH] Remove logger level settings

Summary:
- Please see comments in D83398754. It is recommended to set the logging
  level at the top level only, so remove the module-level debug setting in:
  - `early_stop_checker`, which also avoids spamming the logs.
  - `env`, also changing its `debug` logs to `info`, since seeding is
    usually a one-off step in the flow and is not expected to spam the logs.

Differential Revision: D83985240
---
 torchtnt/utils/early_stop_checker.py | 1 -
 torchtnt/utils/env.py                | 9 ++++-----
 2 files changed, 4 insertions(+), 6 deletions(-)

diff --git a/torchtnt/utils/early_stop_checker.py b/torchtnt/utils/early_stop_checker.py
index a4cd1664d4..6e404846d5 100644
--- a/torchtnt/utils/early_stop_checker.py
+++ b/torchtnt/utils/early_stop_checker.py
@@ -14,7 +14,6 @@
 from typing_extensions import final, Literal
 
 _log: logging.Logger = logging.getLogger(__name__)
-_log.setLevel(logging.DEBUG)
 
 
 @final
diff --git a/torchtnt/utils/env.py b/torchtnt/utils/env.py
index e8f991e98d..d058f6cfc9 100644
--- a/torchtnt/utils/env.py
+++ b/torchtnt/utils/env.py
@@ -26,7 +26,6 @@
 from typing_extensions import Literal
 
 _log: logging.Logger = logging.getLogger(__name__)
-_log.setLevel(logging.DEBUG)  # Set logger level to DEBUG to see all messages
 
 
 def _check_dist_env() -> bool:
@@ -136,7 +135,7 @@ def seed(seed: int, deterministic: Optional[Union[str, int]] = None) -> None:
         raise ValueError(
             f"Invalid seed value provided: {seed}. Value must be in the range [{min_val}, {max_val}]"
         )
-    _log.debug(f"Setting seed to {seed}")
+    _log.info(f"Setting seed to {seed}")
 
     torch.manual_seed(seed)
     np.random.seed(seed)
@@ -144,15 +143,15 @@ def seed(seed: int, deterministic: Optional[Union[str, int]] = None) -> None:
     os.environ["PYTHONHASHSEED"] = str(seed)
 
     if deterministic is not None:
-        _log.debug(f"Setting deterministic debug mode to {deterministic}")
+        _log.info(f"Setting deterministic debug mode to {deterministic}")
         torch.set_deterministic_debug_mode(deterministic)
         deterministic_debug_mode = torch.get_deterministic_debug_mode()
         if deterministic_debug_mode == 0:
-            _log.debug("Disabling cuDNN deterministic mode")
+            _log.info("Disabling cuDNN deterministic mode")
             torch.backends.cudnn.deterministic = False
             torch.backends.cudnn.benchmark = True
         else:
-            _log.debug("Enabling cuDNN deterministic mode")
+            _log.info("Enabling cuDNN deterministic mode")
             torch.backends.cudnn.deterministic = True
             torch.backends.cudnn.benchmark = False
             # reference: https://docs.nvidia.com/cuda/cublas/index.html#cublasApi_reproducibility
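
Note: a minimal sketch of the recommended pattern (illustrative only, not
part of this patch): the logging level is configured once at the application
entry point, and library modules such as `torchtnt.utils.env` only emit
records, inheriting the effective level from the root configuration.

    import logging

    # Top-level (application) configuration: set the level once here,
    # instead of calling setLevel() inside library modules.
    logging.basicConfig(level=logging.INFO)

    # A library module just creates its own logger and logs through it;
    # the effective level is inherited from the root logger above.
    _log = logging.getLogger("torchtnt.utils.env")
    _log.info("Setting seed to %d", 42)  # shown under the INFO config above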