From 24864b905b1d92910a69b44d66347ed5bfe7be12 Mon Sep 17 00:00:00 2001 From: Zonglin Peng Date: Wed, 2 Jul 2025 15:18:07 -0700 Subject: [PATCH] add logging init to cadence tests Summary: Default log to INFO, and override global settings TODO: in next diff, add base test class for all OSS unittest classes Differential Revision: D77624992 --- backends/cadence/aot/tests/test_memory_passes.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/backends/cadence/aot/tests/test_memory_passes.py b/backends/cadence/aot/tests/test_memory_passes.py index df44ded8516..cfeec93570e 100644 --- a/backends/cadence/aot/tests/test_memory_passes.py +++ b/backends/cadence/aot/tests/test_memory_passes.py @@ -6,6 +6,7 @@ # pyre-strict +import logging import math import unittest from typing import cast, List, Optional, Sequence @@ -39,6 +40,15 @@ class TestMemPlanningPasses(unittest.TestCase): + def setUp(self) -> None: + logging.basicConfig( + format="%(asctime)s,%(msecs)d %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s", + datefmt="%Y-%m-%d:%H:%M:%S", + level=logging.getLevelName(logging.INFO), + force=True, + ) + return super().setUp() + def test_calculate_peak_memory_pass(self) -> None: class PeakMemoryTestModel(torch.nn.Module): def __init__(self, input_dim: int, hidden_dim: int, output_dim: int):