diff --git a/python/paddle/tensor/math.py b/python/paddle/tensor/math.py
index 467c7f7ab88f1..811f28c1ba97b 100644
--- a/python/paddle/tensor/math.py
+++ b/python/paddle/tensor/math.py
@@ -171,7 +171,7 @@ def log(x, name=None):
             [[0.69314718, 1.09861231, 1.38629436],
              [1.94591010, 2.07944155, 2.19722462]])
     """
-    if in_dynamic_mode():
+    if in_dynamic_or_pir_mode():
         return _C_ops.log(x)
     else:
         check_variable_and_dtype(
diff --git a/test/legacy_test/test_activation_op.py b/test/legacy_test/test_activation_op.py
index c5fec28242344..5c4ed1a242c1c 100644
--- a/test/legacy_test/test_activation_op.py
+++ b/test/legacy_test/test_activation_op.py
@@ -3257,10 +3257,13 @@ def setUp(self):
     def if_enable_cinn(self):
         pass
 
+    def test_check_output(self):
+        self.check_output(check_new_ir=True)
+
     def test_check_grad(self):
         if self.dtype == np.float16:
             return
-        self.check_grad(['X'], 'Out', check_prim=True)
+        self.check_grad(['X'], 'Out', check_prim=True, check_new_ir=True)
 
 
 class Test_Log_Op_Fp16(unittest.TestCase):
@@ -4608,7 +4611,7 @@ def test_check_grad(self):
 create_test_act_fp16_class(TestELU)
 create_test_act_fp16_class(TestCELU)
 create_test_act_fp16_class(TestReciprocal)
-create_test_act_fp16_class(TestLog, check_prim=True)
+create_test_act_fp16_class(TestLog, check_prim=True, check_new_ir=True)
 if core.is_compiled_with_rocm():
     create_test_act_fp16_class(TestLog2)
 else:
@@ -4748,7 +4751,7 @@ def test_check_grad(self):
 create_test_act_bf16_class(TestELU)
 create_test_act_bf16_class(TestCELU)
 create_test_act_bf16_class(TestReciprocal)
-create_test_act_bf16_class(TestLog, check_prim=True)
+create_test_act_bf16_class(TestLog, check_prim=True, check_new_ir=True)
 if core.is_compiled_with_rocm():
     create_test_act_bf16_class(TestLog2)
 else:
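Reviewer note, not part of the patch: a minimal sketch of how the changed dispatch can be exercised end to end, assuming a Paddle build where `in_dynamic_or_pir_mode()` exists (as the patch implies). The input values mirror the docstring example; the tolerance is an illustrative choice, not taken from the test suite.

    # An eager call to paddle.log goes through the updated branch, which now
    # also covers PIR mode; numpy's log serves as the reference result.
    import numpy as np
    import paddle

    x = paddle.to_tensor([[2.0, 3.0, 4.0], [7.0, 8.0, 9.0]])
    res = paddle.log(x)
    np.testing.assert_allclose(res.numpy(), np.log(x.numpy()), rtol=1e-6)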