[tests] clean paddle.paddle (PaddlePaddle#60380)
gouzil authored and Wanglongzhi2001 committed Jan 7, 2024
1 parent bdbbfe7 commit 6783f63
Showing 9 changed files with 15 additions and 15 deletions.
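
Every hunk below replaces the redundant paddle.paddle.* spelling with the canonical paddle.* form; behaviour is unchanged, only the spelling is cleaned up. A minimal sketch of the pattern, loosely based on the first hunk (the matrix h is a made-up stand-in for the test's H0, and the note about why the old spelling resolved is an assumption, not stated in the commit):

import paddle

paddle.set_device("cpu")  # eigvals only supports CPU, as the test comments note

x = paddle.randn((3, 3), dtype="float32")
h = x @ x.T  # symmetric matrix, stand-in for the test's H0

# Before this commit the test spelled the call paddle.paddle.linalg.eigvals(H0);
# that form happens to resolve (presumably because the package ends up visible as
# an attribute of itself), but paddle.linalg.eigvals is the documented spelling.
eigvals = paddle.linalg.eigvals(h)
print(eigvals)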
python/paddle/incubate/optimizer/functional/utils.py (2 changes: 1 addition & 1 deletion)
@@ -72,7 +72,7 @@ def false_fn():
paddle.static.nn.cond(is_symmetric, None, false_fn)
# eigvals only support cpu
paddle.set_device("cpu")
- eigvals = paddle.paddle.linalg.eigvals(H0)
+ eigvals = paddle.linalg.eigvals(H0)
is_positive = paddle.all(eigvals.real() > 0.0) and paddle.all(
eigvals.imag() == 0.0
)
test/auto_parallel/test_auto_conditional_block.py (2 changes: 1 addition & 1 deletion)
@@ -80,7 +80,7 @@ def forward(self, input):


def loss_func(pred, label):
- error_cost = paddle.paddle.nn.functional.square_error_cost(pred, label)
+ error_cost = paddle.nn.functional.square_error_cost(pred, label)
error_cost = error_cost[error_cost > 0].astype("float32")
loss = paddle.mean(error_cost)
return loss
test/ipu/test_dy2static_fp16_ipu.py (2 changes: 1 addition & 1 deletion)
@@ -34,7 +34,7 @@ def forward(self, x, target=None):
x = paddle.flatten(x, 1, -1)
if target is not None:
x = paddle.nn.functional.softmax(x)
- loss = paddle.paddle.nn.functional.cross_entropy(
+ loss = paddle.nn.functional.cross_entropy(
x, target, reduction='none', use_softmax=False
)
if self.use_ipu:
test/ipu/test_dy2static_ipu.py (4 changes: 2 additions & 2 deletions)
@@ -49,7 +49,7 @@ def forward(self, x, target=None):
if target is not None:
if self.use_softmax:
x = paddle.nn.functional.softmax(x)
- loss = paddle.paddle.nn.functional.cross_entropy(
+ loss = paddle.nn.functional.cross_entropy(
x, target, reduction='none', use_softmax=False
)
if self.use_reduction:
@@ -219,7 +219,7 @@ def create_model(self, use_ipu=False):

class TestWithoutIdentityLoss2(TestBase):
def set_op_attrs(self):
- self.loss_op = paddle.paddle.nn.functional.softmax_with_cross_entropy
+ self.loss_op = paddle.nn.functional.softmax_with_cross_entropy

def set_data_feed(self):
self.data = paddle.uniform((8, 3, 10, 10), dtype='float32')
test/ipu/test_modelruntime_ipu.py (2 changes: 1 addition & 1 deletion)
@@ -33,7 +33,7 @@ def forward(self, x, target=None):
x = paddle.flatten(x, 1, -1)
if target is not None:
x = paddle.nn.functional.softmax(x)
- loss = paddle.paddle.nn.functional.cross_entropy(
+ loss = paddle.nn.functional.cross_entropy(
x, target, reduction='none', use_softmax=False
)
return x, loss
test/ipu/test_print_op_ipu.py (2 changes: 1 addition & 1 deletion)
@@ -120,7 +120,7 @@ def forward(self, x, target=None):
x = paddle.flatten(x, 1, -1)
if target is not None:
x = paddle.nn.functional.softmax(x)
- loss = paddle.paddle.nn.functional.cross_entropy(
+ loss = paddle.nn.functional.cross_entropy(
x, target, reduction='none', use_softmax=False
)
loss = paddle.incubate.identity_loss(loss, 1)
test/legacy_test/test_conv2d_transpose_op.py (4 changes: 2 additions & 2 deletions)
@@ -1349,7 +1349,7 @@ def var_prefix(self):
def call_func(self, x):
w_var = paddle.randn((3, 6, 3, 3), dtype='float32')
output_size = paddle.assign([17])
- out = paddle.paddle.nn.functional.conv2d_transpose(
+ out = paddle.nn.functional.conv2d_transpose(
x, w_var, stride=2, output_size=output_size
)
return out
@@ -1388,7 +1388,7 @@ def path_prefix(self):
def call_func(self, x):
w_var = paddle.randn((3, 6, 3, 3), dtype='float32')
output_size = [17, paddle.assign([17])]
- out = paddle.paddle.nn.functional.conv2d_transpose(
+ out = paddle.nn.functional.conv2d_transpose(
x, w_var, stride=2, output_size=output_size
)
return out
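
For context, a minimal sketch of the cleaned-up conv2d_transpose call (the weight shape mirrors the test, but the input x and the fixed output_size list are illustrative assumptions; the test itself passes a Tensor built with paddle.assign):

import paddle

x = paddle.randn((2, 3, 8, 8), dtype='float32')  # NCHW input, illustrative shape
w = paddle.randn((3, 6, 3, 3), dtype='float32')  # same weight shape as the test
out = paddle.nn.functional.conv2d_transpose(
    x, w, stride=2, output_size=[17, 17]
)
print(out.shape)  # [2, 6, 17, 17]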
(file name not shown in this capture) (2 changes: 1 addition & 1 deletion)
@@ -58,7 +58,7 @@ def test_a_sync_optimizer3(self):
size=[1000000000, 100000],
param_attr=paddle.base.ParamAttr(
name="embedding",
- initializer=paddle.paddle.nn.initializer.Constant(value=0.01),
+ initializer=paddle.nn.initializer.Constant(value=0.01),
),
is_sparse=True,
)
test/legacy_test/test_fused_multi_transformer_op.py (10 changes: 5 additions & 5 deletions)
@@ -60,7 +60,7 @@ def setUp(self):
self.__class__.no_need_check_grad = False

bias_attr = paddle.base.ParamAttr(
- initializer=paddle.paddle.nn.initializer.Constant(value=0.0005)
+ initializer=paddle.nn.initializer.Constant(value=0.0005)
)
self.q_proj = Linear(
self.embed_dim,
@@ -1383,16 +1383,16 @@ def config(self):
self.has_attn_mask = False
self.x_type = np.float32
self.weight_attr = paddle.ParamAttr(
- initializer=paddle.paddle.nn.initializer.Constant(0.0)
+ initializer=paddle.nn.initializer.Constant(0.0)
)
self.bias_attr = paddle.ParamAttr(
- initializer=paddle.paddle.nn.initializer.Constant(0.0005)
+ initializer=paddle.nn.initializer.Constant(0.0005)
)
self.ln_w_attr = paddle.ParamAttr(
- initializer=paddle.paddle.nn.initializer.Constant(1.0)
+ initializer=paddle.nn.initializer.Constant(1.0)
)
self.ln_b_attr = paddle.ParamAttr(
- initializer=paddle.paddle.nn.initializer.Constant(0.0)
+ initializer=paddle.nn.initializer.Constant(0.0)
)

def test_fused_multi_transformer_op(self):
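
A minimal sketch of the cleaned-up initializer spelling used throughout this file (the Linear layer size is an arbitrary placeholder, not taken from the test):

import paddle

bias_attr = paddle.ParamAttr(
    initializer=paddle.nn.initializer.Constant(value=0.0005)
)
linear = paddle.nn.Linear(8, 8, bias_attr=bias_attr)
print(linear.bias)  # every entry initialized to 0.0005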
