[Prim] add flag test cases and block prim vjp matmul by default #64220

Merged: 2 commits merged on May 14, 2024
23 changes: 23 additions & 0 deletions python/paddle/base/core.py
@@ -647,3 +647,26 @@ def check_and_set_prim_all_enabled():


check_and_set_prim_all_enabled()


SKIPPED_PRIM_VJP_DEFAULT_OPS = ["matmul_grad"]


def _clear_prim_vjp_skip_default_ops():
    for item in SKIPPED_PRIM_VJP_DEFAULT_OPS:
        _remove_skip_comp_ops(item)


# The decomposition of some special ops, such as matmul_grad, currently reduces
# performance and is difficult for CINN to optimize, so they are blacklisted by default.
# This API is intended for prim/CINN development and will be removed in the future.
def _check_and_set_prim_vjp_skip_default_ops():
    flag = os.getenv("FLAGS_prim_vjp_skip_default_ops", "1")
    if flag and flag.lower() in ("1", "true"):
        _set_prim_backward_blacklist(*SKIPPED_PRIM_VJP_DEFAULT_OPS)
        return True
    else:
        _clear_prim_vjp_skip_default_ops()
        return False


_check_and_set_prim_vjp_skip_default_ops()
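
Below is a minimal usage sketch (not part of this diff) of how the new flag is expected to be driven. Because _check_and_set_prim_vjp_skip_default_ops() runs when python/paddle/base/core.py is imported, the environment variable has to be set before paddle is imported; the helper can then revert the default blacklist at runtime. Only the names defined in the diff above are real; the surrounding call site is an illustrative assumption.

import os

# Illustrative assumption: the flag must be set before paddle is imported, since
# it is read at import time of python/paddle/base/core.py. "0"/"false" keeps
# matmul_grad decomposable; the default "1" adds it to the prim backward blacklist.
os.environ["FLAGS_prim_vjp_skip_default_ops"] = "0"

from paddle.base import core  # noqa: E402

# Tests can also revert the default blacklist explicitly at runtime, as
# op_test.py and test_pir_prim_flags_v2.py below do.
core._clear_prim_vjp_skip_default_ops()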
3 changes: 3 additions & 0 deletions test/legacy_test/op_test.py
@@ -431,6 +431,9 @@ def setUpClass(cls):
        cls._check_cinn = False
        cls.check_pir_onednn = False

        # TODO(CZ): to be removed in the future
        core._clear_prim_vjp_skip_default_ops()

        np.random.seed(123)
        random.seed(124)

10 changes: 10 additions & 0 deletions test/prim/pir_prim/CMakeLists.txt
@@ -4,6 +4,7 @@ set(TEST_PRIM_PURE_PIR_CASES
test_prim_custom_vjp
test_prim_jit
test_pir_prim_flags
test_pir_prim_flags_v2
test_sink_decomp
test_prim_skip_dynamic
test_prim_dynamic
@@ -24,6 +25,15 @@ foreach(target ${TEST_PRIM_PURE_PIR_CASES})
FLAGS_prim_enable_dynamic=true)
endforeach()

py_test_modules(
test_pir_prim_flags_v3
MODULES
test_pir_prim_flags_v3
ENVS
GLOG_v=1
FLAGS_enable_pir_api=true
FLAGS_prim_vjp_skip_default_ops=0)

set_tests_properties(test_auto_recompute PROPERTIES TIMEOUT 40)
set_tests_properties(test_auto_recompute_dy2static PROPERTIES TIMEOUT 40)

87 changes: 87 additions & 0 deletions test/prim/pir_prim/test_pir_prim_flags_v2.py
@@ -0,0 +1,87 @@
# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import paddle
from paddle.base import core
from paddle.decomposition import decomp


class PrimeNet(paddle.nn.Layer):
    def __init__(self):
        super().__init__()

    def forward(self, x):
        x1 = paddle.tanh(x)
        x2 = paddle.exp(x)
        res = paddle.matmul(x1, x2)
        return res


class TestPrimMatmulDefault(unittest.TestCase):
    def train(self):
        x = paddle.randn([4, 4])
        x.stop_gradient = False
        net = PrimeNet()
        net.forward = paddle.jit.to_static(full_graph=True)(net.forward)
        out = net(x)
        loss = paddle.mean(out)
        loss.backward()
        self.check_prim(net)

    def check_prim(self, net):
        program = net.forward.program_cache.last()[-1][-1].train_program
        if isinstance(
            program, paddle.jit.dy2static.pir_partial_program.RunnableProgram
        ):
            program = program.program
        block = program.global_block()
        ops = [op.name() for op in block.ops]
        self.assertTrue('pd_op.matmul_grad' in ops)

    def test_prim_matmul_default(self):
        with decomp.prim_guard():
            self.train()


class TestPrimMatmulDefaultRevert(unittest.TestCase):
    def train(self):
        x = paddle.randn([4, 4])
        x.stop_gradient = False
        net = PrimeNet()
        net.forward = paddle.jit.to_static(full_graph=True)(net.forward)
        out = net(x)
        loss = paddle.mean(out)
        loss.backward()
        self.check_prim(net)

    def check_prim(self, net):
        program = net.forward.program_cache.last()[-1][-1].train_program
        if isinstance(
            program, paddle.jit.dy2static.pir_partial_program.RunnableProgram
        ):
            program = program.program
        block = program.global_block()
        ops = [op.name() for op in block.ops]
        self.assertTrue('pd_op.matmul_grad' not in ops)

    def test_prim_matmul_default(self):
        core._clear_prim_vjp_skip_default_ops()
        with decomp.prim_guard():
            self.train()


if __name__ == '__main__':
    unittest.main()
59 changes: 59 additions & 0 deletions test/prim/pir_prim/test_pir_prim_flags_v3.py
@@ -0,0 +1,59 @@
# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import paddle
from paddle.decomposition import decomp


class PrimeNet(paddle.nn.Layer):
    def __init__(self):
        super().__init__()

    def forward(self, x):
        x1 = paddle.tanh(x)
        x2 = paddle.exp(x)
        res = paddle.matmul(x1, x2)
        return res


class TestPrimMatmulDefaultRevert(unittest.TestCase):
    def train(self):
        x = paddle.randn([4, 4])
        x.stop_gradient = False
        net = PrimeNet()
        net.forward = paddle.jit.to_static(full_graph=True)(net.forward)
        out = net(x)
        loss = paddle.mean(out)
        loss.backward()
        self.check_prim(net)

    def check_prim(self, net):
        program = net.forward.program_cache.last()[-1][-1].train_program
        if isinstance(
            program, paddle.jit.dy2static.pir_partial_program.RunnableProgram
        ):
            program = program.program
        block = program.global_block()
        ops = [op.name() for op in block.ops]
        self.assertTrue('pd_op.matmul_grad' not in ops)

    def test_prim_matmul_default(self):
        with decomp.prim_guard():
            self.train()


if __name__ == '__main__':
    unittest.main()