[Eager] Remove redundancy code, fix fp16 case (#42169) (#42215)
veyron95 committed Apr 25, 2022
1 parent d5f05bd commit e4da34f
Showing 2 changed files with 7 additions and 2 deletions.
python/paddle/fluid/initializer.py (1 change: 0 additions & 1 deletion)
@@ -353,7 +353,6 @@ def __call__(self, var, block=None):
             out_var = _C_ops.final_state_gaussian_random(
                 var.shape, self._mean, self._std_dev, self._seed, out_dtype,
                 place)
-            out_var._share_underline_tensor_to(var)
 
             if var.dtype in [VarDesc.VarType.FP16, VarDesc.VarType.BF16]:
                 var_tmp = _C_ops.final_state_cast(out_var, var.dtype)
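The deleted line shared the freshly sampled tensor into var unconditionally, while the branch just below shares it again, casting first when var is FP16/BF16. Dropping the early call leaves exactly one share, placed after the cast, which is what the fp16 case needs. Below is a small, self-contained sketch of that single-share-after-cast pattern; it uses NumPy and a dict-based stand-in for the variable, which are illustrative placeholders rather than Paddle internals.

import numpy as np


def gaussian_init(var, mean=0.0, std=1.0, seed=0):
    rng = np.random.default_rng(seed)
    # Sample in fp32 first, like the gaussian_random kernel above.
    out = rng.normal(mean, std, size=var["shape"]).astype("float32")
    if var["dtype"] == "float16":
        # Low-precision variables: cast before handing the buffer over.
        out = out.astype("float16")
    # Share the result into the variable exactly once, after any cast.
    var["data"] = out


var = {"shape": (2, 3), "dtype": "float16", "data": None}
gaussian_init(var)
print(var["data"].dtype)  # float16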
8 changes: 7 additions & 1 deletion (second changed file)
@@ -19,6 +19,7 @@
 
 import paddle.fluid as fluid
 from paddle.fluid.dygraph.nn import Conv2D, Pool2D, Linear
+from paddle.fluid.framework import _test_eager_guard
 
 
 class SimpleImgConvPool(fluid.dygraph.Layer):
@@ -117,7 +118,7 @@ def forward(self, inputs, label):
 
 
 class TestMnist(unittest.TestCase):
-    def test_mnist_fp16(self):
+    def func_mnist_fp16(self):
         if not fluid.is_compiled_with_cuda():
             return
         x = np.random.randn(1, 3, 224, 224).astype("float16")
@@ -129,6 +130,11 @@ def test_mnist_fp16(self):
             loss = model(x, y)
             print(loss.numpy())
 
+    def test_mnist_fp16(self):
+        with _test_eager_guard():
+            self.func_mnist_fp16()
+            self.func_mnist_fp16()
+
 
 if __name__ == "__main__":
     unittest.main()
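The new test_mnist_fp16 wrapper follows the double-run pattern Paddle's dygraph unit tests used while eager mode was being rolled out: the body runs once under _test_eager_guard() (eager, final-state dygraph) and once more in legacy dygraph mode. A minimal, self-contained sketch of that pattern, assuming a Paddle build from this era that still exposes _test_eager_guard (the ExampleTest class and its body are hypothetical):

import unittest

from paddle.fluid.framework import _test_eager_guard


class ExampleTest(unittest.TestCase):
    def func_body(self):
        # Real assertions would go here; this stand-in just passes.
        self.assertTrue(True)

    def test_body(self):
        # First pass: eager (final-state) dygraph mode.
        with _test_eager_guard():
            self.func_body()
        # Second pass: legacy dygraph mode.
        self.func_body()


if __name__ == "__main__":
    unittest.main()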
