From f653b391f9d1ac5f386cb8b8b399a85914a6f0ec Mon Sep 17 00:00:00 2001
From: cyberslack_lee
Date: Mon, 30 Oct 2023 19:08:28 +0800
Subject: [PATCH] =?UTF-8?q?=E3=80=90PIR=20API=20adaptor=20No.206=E3=80=812?=
 =?UTF-8?q?07=E3=80=91=20Migrate=20paddle.sign/sinh=20into=20pir=20(#58377?=
 =?UTF-8?q?)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 python/paddle/tensor/math.py           | 2 +-
 python/paddle/tensor/ops.py            | 2 +-
 test/legacy_test/test_activation_op.py | 7 ++++++-
 test/legacy_test/test_sign_op.py       | 8 ++++----
 4 files changed, 12 insertions(+), 7 deletions(-)

diff --git a/python/paddle/tensor/math.py b/python/paddle/tensor/math.py
index c5ed7ff655c5c..319504a8db270 100644
--- a/python/paddle/tensor/math.py
+++ b/python/paddle/tensor/math.py
@@ -4575,7 +4575,7 @@ def sign(x, name=None):
             Tensor(shape=[4], dtype=float32, place=Place(cpu), stop_gradient=True,
             [ 1.,  0., -1.,  1.])
     """
-    if in_dynamic_mode():
+    if in_dynamic_or_pir_mode():
         return _C_ops.sign(x)
     else:
         check_variable_and_dtype(
diff --git a/python/paddle/tensor/ops.py b/python/paddle/tensor/ops.py
index 941450d7c8b91..daa5728995249 100644
--- a/python/paddle/tensor/ops.py
+++ b/python/paddle/tensor/ops.py
@@ -1010,7 +1010,7 @@ def sinh(x, name=None):
             Tensor(shape=[4], dtype=float32, place=Place(cpu), stop_gradient=True,
             [-0.41075233, -0.20133601,  0.10016675,  0.30452031])
     """
-    if in_dynamic_mode():
+    if in_dynamic_or_pir_mode():
         return _C_ops.sinh(x)
     else:
         check_variable_and_dtype(
diff --git a/test/legacy_test/test_activation_op.py b/test/legacy_test/test_activation_op.py
index a96f892c3f1bd..a70dbf7b1fe4b 100644
--- a/test/legacy_test/test_activation_op.py
+++ b/test/legacy_test/test_activation_op.py
@@ -915,10 +915,13 @@ def setUp(self):
 
         self.convert_input_output()
 
+    def test_check_output(self):
+        self.check_output(check_pir=True)
+
     def test_check_grad(self):
         if self.dtype == np.float16:
             return
-        self.check_grad(['X'], 'Out')
+        self.check_grad(['X'], 'Out', check_pir=True)
 
 
 class TestSinh_Complex64(TestSinh):
@@ -945,6 +948,7 @@ def test_dygraph(self):
         z_expected = np.sinh(np_x)
         np.testing.assert_allclose(z, z_expected, rtol=1e-05)
 
+    @test_with_pir_api
     def test_api(self):
         with static_guard():
             test_data_shape = [11, 17]
@@ -985,6 +989,7 @@ def test_backward(self):
 
 
 class TestSinhOpError(unittest.TestCase):
+    @test_with_pir_api
     def test_errors(self):
         with static_guard():
             with program_guard(Program()):
diff --git a/test/legacy_test/test_sign_op.py b/test/legacy_test/test_sign_op.py
index 404b52ef1d1fc..fc8a0ed27547c 100644
--- a/test/legacy_test/test_sign_op.py
+++ b/test/legacy_test/test_sign_op.py
@@ -34,10 +34,10 @@ def setUp(self):
         self.outputs = {'Out': np.sign(self.inputs['X'])}
 
     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_pir=True)
 
     def test_check_grad(self):
-        self.check_grad(['X'], 'Out')
+        self.check_grad(['X'], 'Out', check_pir=True)
 
 
 class TestSignFP16Op(TestSignOp):
@@ -70,10 +70,10 @@ def setUp(self):
         self.place = core.CUDAPlace(0)
 
     def test_check_output(self):
-        self.check_output_with_place(self.place)
+        self.check_output_with_place(self.place, check_pir=True)
 
     def test_check_grad(self):
-        self.check_grad_with_place(self.place, ['X'], 'Out')
+        self.check_grad_with_place(self.place, ['X'], 'Out', check_pir=True)
 
 
 class TestSignAPI(unittest.TestCase):
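
A minimal sketch of the behaviour this change routes through _C_ops, assuming a Paddle
build that carries the patch; the expected outputs are the ones quoted in the sign/sinh
docstring hunks above, and PIR coverage in the tests comes from the test_with_pir_api
decorator and the check_pir=True flags shown in the diff:

    import paddle

    # Dynamic (eager) mode: paddle.sign/paddle.sinh take the _C_ops fast path,
    # which in_dynamic_or_pir_mode() now also selects when a PIR program is built.
    x = paddle.to_tensor([3.0, 0.0, -2.0, 1.7], dtype='float32')
    print(paddle.sign(x))   # [ 1.,  0., -1.,  1.]

    y = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3], dtype='float32')
    print(paddle.sinh(y))   # [-0.41075233, -0.20133601,  0.10016675,  0.30452031]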