rm autograd func dynamic eager tests (PaddlePaddle#48788)
yjjiang11 committed Dec 8, 2022
1 parent c05dee7 commit 9c8aba8
Showing 1 changed file with 3 additions and 46 deletions.
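Context for the diff below: during Paddle's eager-mode migration, these tests ran each case twice through a test_all_cases driver, once inside the _test_eager_guard() context (dynamic eager mode) and once in the then-default mode. With eager mode now the default, the guarded first pass is redundant; this commit drops it, removes the now-unused import, and renames three func_* helpers to test_* so unittest collects them directly. A minimal before/after sketch of the pattern (the class name and values are illustrative, not the file's actual tests):

import unittest

import paddle

# Removed pattern: each case ran twice, the first pass wrapped in the
# transitional guard.
#
#     def test_all_cases(self):
#         with _test_eager_guard():
#             self.func_vjp()  # pass 1: dynamic eager mode
#         self.func_vjp()      # pass 2: then-default mode
#
# Pattern after this commit: one directly discoverable test method.

class TestVJPSketch(unittest.TestCase):  # hypothetical class, for illustration
    def test_vjp(self):
        x = paddle.ones([2], dtype='float32')
        x.stop_gradient = False
        # vjp(func, xs, v=None) returns (func_out, vjp_result); omitting v
        # uses a ones-like cotangent.
        _, grad = paddle.incubate.autograd.vjp(paddle.tanh, x)
        self.assertEqual(grad.shape, x.shape)

if __name__ == '__main__':
    unittest.main()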
@@ -24,7 +24,6 @@
 import paddle
 import paddle.fluid as fluid
 import paddle.nn.functional as F
-from paddle.fluid.framework import _test_eager_guard
 from paddle.incubate.autograd.utils import as_tensors


@@ -201,14 +200,6 @@ def func_vjp_aliased_input(self):
         self.check_results(ref_result, aliased_result)

     def test_all_cases(self):
-        with _test_eager_guard():
-            self.func_vjp_i1o1()
-            self.func_vjp_i2o1()
-            self.func_vjp_i2o2()
-            self.func_vjp_i2o2_omitting_v()
-            self.func_vjp_nested()
-            self.func_vjp_aliased_input()
-
         self.func_vjp_i1o1()
         self.func_vjp_i2o1()
         self.func_vjp_i2o2()
@@ -237,17 +228,12 @@ def test_input_single_tensor(self):
     ),
 )
 class TestVJPException(unittest.TestCase):
-    def func_vjp(self):
+    def test_vjp(self):
         with self.assertRaises(self.expected_exception):
             paddle.incubate.autograd.vjp(
                 self.fun, paddle.to_tensor(self.xs), paddle.to_tensor(self.v)
             )

-    def test_all_cases(self):
-        with _test_eager_guard():
-            self.func_vjp()
-        self.func_vjp()
-

 def jac(grad_fn, f, inputs):
     assert grad_fn in [
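Aside on the renames: unittest only auto-collects methods whose names begin with "test", which is why func_vjp above becomes test_vjp once the test_all_cases driver that used to invoke it is deleted. A tiny self-contained illustration (hypothetical class, for illustration only):

import unittest

class DiscoveryExample(unittest.TestCase):
    def func_case(self):  # NOT collected: name lacks the "test" prefix
        raise AssertionError("never runs under default discovery")

    def test_case(self):  # collected and run by unittest
        self.assertTrue(True)

if __name__ == '__main__':
    unittest.main()  # runs test_case only; func_case is ignored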
@@ -324,11 +310,6 @@ def func_jvp_i2o2_omitting_v(self):
         self.check_results(results_omitting_v, results_with_v)

     def test_all_cases(self):
-        with _test_eager_guard():
-            self.func_jvp_i1o1()
-            self.func_jvp_i2o1()
-            self.func_jvp_i2o2()
-            self.func_jvp_i2o2_omitting_v()
         self.func_jvp_i1o1()
         self.func_jvp_i2o1()
         self.func_jvp_i2o2()
@@ -372,7 +353,7 @@ def setUp(self):
             .get("atol")
         )

-    def func_jacobian(self):
+    def test_jacobian(self):
         xs = (
             [paddle.to_tensor(x) for x in self.xs]
             if isinstance(self.xs, typing.Sequence)
@@ -409,11 +390,6 @@ def _get_expected(self):
         )
         return utils._np_concat_matrix_sequence(jac, utils.MatrixFormat.NM)

-    def test_all_cases(self):
-        with _test_eager_guard():
-            self.func_jacobian()
-        self.func_jacobian()
-

 @utils.place(config.DEVICES)
 @utils.parameterize(
@@ -451,7 +427,7 @@ def setUp(self):
             .get("atol")
         )

-    def func_jacobian(self):
+    def test_jacobian(self):
         xs = (
             [paddle.to_tensor(x) for x in self.xs]
             if isinstance(self.xs, typing.Sequence)
@@ -505,11 +481,6 @@ def _get_expected(self):
             jac, utils.MatrixFormat.NBM, utils.MatrixFormat.BNM
         )

-    def test_all_cases(self):
-        with _test_eager_guard():
-            self.func_jacobian()
-        self.func_jacobian()
-

 class TestHessianNoBatch(unittest.TestCase):
     @classmethod
@@ -607,13 +578,6 @@ def func(x):
             paddle.incubate.autograd.Hessian(func, paddle.ones([3]))

     def test_all_cases(self):
-        with _test_eager_guard():
-            self.setUpClass()
-            self.func_single_input()
-            self.func_multi_input()
-            self.func_allow_unused_true()
-            self.func_create_graph_true()
-            self.func_out_not_single()
         self.setUpClass()
         self.func_single_input()
         self.func_multi_input()
@@ -744,13 +708,6 @@ def func(x):
         )

     def test_all_cases(self):
-        with _test_eager_guard():
-            self.setUpClass()
-            self.func_single_input()
-            self.func_multi_input()
-            self.func_allow_unused()
-            self.func_stop_gradient()
-            self.func_out_not_single()
         self.setUpClass()
         self.func_single_input()
         self.func_multi_input()
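The last two hunks apply the same cleanup to the Hessian tests. For reference, a minimal sketch of the paddle.incubate.autograd.Hessian API those tests exercise (the function and shapes are assumptions, not taken from the file; the API requires a scalar-output function, which is what the func_out_not_single case above checks):

import paddle

def func(x):
    # scalar output, as the Hessian API requires
    return paddle.sum(paddle.tanh(x))

x = paddle.ones([3], dtype='float32')
x.stop_gradient = False
H = paddle.incubate.autograd.Hessian(func, x)
print(H[:].shape)  # full 3 x 3 Hessian, evaluated lazily on indexing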
