Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fluid API migration : Assert, increment, cond #48885

Merged
merged 20 commits into from
Dec 12, 2022
Merged
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ class HybridParallelInferenceHelper:
element_in_arr = layers.array_read(array=arr, i=step_idx)
# write placehold data to global lod_tensor_array,
# it need for send_v2 of lod_tensor_array
layers.increment(x=step_idx, value=1.0, in_place=True)
paddle.increment(x=step_idx, value=1.0)
layers.array_write(element_in_arr, i=step_idx, array=arr)

with paddle.fluid.device_guard(f'{device}:0'):
Expand Down Expand Up @@ -137,7 +137,7 @@ class HybridParallelInferenceHelper:
with while_op.block():
with paddle.fluid.device_guard(f'{device}:all'):
input = layers.array_read(array=data, i=step_idx)
layers.increment(x=step_idx, value=1.0, in_place=True)
paddle.increment(x=step_idx, value=1.0)
layers.array_write(input, i=step_idx, array=data)

with paddle.fluid.device_guard(f'{device}:0'):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -286,7 +286,7 @@ def true_apply_gradient():
)
new_grad.op._set_attr(OP_ROLE_KEY, op_maker.OpRole.Optimize)

layers.cond(cond_var, true_fn=true_apply_gradient, false_fn=None)
paddle.static.nn.cond(cond_var, true_fn=true_apply_gradient, false_fn=None)
cond_op = main_program.global_block().ops[-1]
cond_op._set_attr(OP_ROLE_KEY, OpRole.Optimize)

Expand Down
2 changes: 1 addition & 1 deletion python/paddle/fluid/contrib/slim/quantization/adaround.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ def round_loss_fn():

return round_loss

round_loss = fluid.layers.cond(
round_loss = paddle.static.nn.cond(
warm_start,
lambda: fluid.layers.fill_constant(
shape=[1], dtype='float32', value=0.0
Expand Down
Loading