[CleanOps]del_rnn_memory_helper_op #57926

Merged
@@ -620,8 +620,6 @@ void BuildOpFuncList(const platform::Place& place,
         "pylayer",
         "pylayer_grad",
         "recurrent_grad",
-        "rnn_memory_helper",
-        "rnn_memory_helper_grad",
         "while",
         "while_grad"};
     bool allow_var_not_in_program = ops_with_var_not_in_program.count(op_type);
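For context, the check above is a plain allowlist lookup: only the listed control-flow ops may reference variables that do not exist in the program (for example, names that live only in a sub-block), and with rnn_memory_helper deleted it no longer needs an entry. A minimal sketch of the same membership check, written in Python rather than the C++ above, with hypothetical names:

# Hypothetical sketch mirroring the C++ allowlist above; not Paddle's API.
OPS_WITH_VAR_NOT_IN_PROGRAM = {
    "pylayer",
    "pylayer_grad",
    "recurrent_grad",
    "while",
    "while_grad",
}

def allow_var_not_in_program(op_type: str) -> bool:
    # Ops on the allowlist may name variables absent from the program.
    return op_type in OPS_WITH_VAR_NOT_IN_PROGRAM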
77 changes: 0 additions & 77 deletions paddle/fluid/framework/prune_test.cc
@@ -198,80 +198,3 @@ TEST(Prune, multi_target) {
   f::Prune(*pdesc, feed_var_names, &pruned);
   EXPECT_EQ(pruned.blocks(0).ops_size(), 3);
 }
-
-TEST(Prune, recurrrent_op) {
-  f::ProgramDesc program;
-  f::BlockDesc *block = program.MutableBlock(0);
-  f::BlockDesc *sub_block = program.AppendBlock(*block);
-  AddOp("one_two",
-        {{"input", {"a"}}},
-        {{"output", {"b", "c"}}},
-        f::AttributeMap{},
-        block);
-
-  std::vector<std::string> state_var_name(1, "y");
-  AddOp("recurrent",
-        {{"input", {"b", "c"}}},
-        {{"output", {"b1, c1"}}},
-        {{"ex_states", state_var_name},
-         {"states", state_var_name},
-         {"sub_block", sub_block}},
-        block);
-
-  EXPECT_TRUE(sub_block != nullptr);
-  AddOp("rnn_memory_helper",
-        {{"input", {"x"}}},
-        {{"output", {"y"}}},
-        f::AttributeMap{},
-        sub_block);
-
-  f::proto::ProgramDesc *pdesc = program.Proto();
-  pdesc->mutable_blocks(0)->mutable_ops(1)->set_is_target(true);
-
-  f::proto::ProgramDesc pruned;
-  std::set<std::string> feed_var_names = {"a"};
-
-  f::Prune(*pdesc, feed_var_names, &pruned);
-  EXPECT_EQ(pruned.blocks_size(), 2);
-  EXPECT_EQ(pruned.blocks(0).ops_size(), 2);
-  EXPECT_EQ(pruned.blocks(1).ops_size(), 1);
-}
-
-// If the output of an op modifies feed vars, the op should not clip.
-TEST(Prune, recurrrent_op_2) {
-  f::ProgramDesc program;
-  f::BlockDesc *block = program.MutableBlock(0);
-  f::BlockDesc *sub_block = program.AppendBlock(*block);
-  AddOp("one_two",
-        {{"input", {"a"}}},
-        {{"output", {"b", "c"}}},
-        f::AttributeMap{},
-        block);
-
-  std::vector<std::string> state_var_name(1, "y");
-  AddOp("recurrent",
-        {{"input", {"b", "c"}}},
-        {{"output", {"b1, c1"}}},
-        {{"ex_states", state_var_name},
-         {"states", state_var_name},
-         {"sub_block", sub_block}},
-        block);
-
-  EXPECT_TRUE(sub_block != nullptr);
-  AddOp("rnn_memory_helper",
-        {{"input", {"x"}}},
-        {{"output", {"a"}}},
-        f::AttributeMap{},
-        sub_block);
-
-  f::proto::ProgramDesc *pdesc = program.Proto();
-  pdesc->mutable_blocks(0)->mutable_ops(1)->set_is_target(true);
-
-  f::proto::ProgramDesc pruned;
-  std::set<std::string> feed_var_names = {"x", "a"};
-
-  f::Prune(*pdesc, feed_var_names, &pruned);
-  EXPECT_EQ(pruned.blocks_size(), 2);
-  EXPECT_EQ(pruned.blocks(0).ops_size(), 2);
-  EXPECT_EQ(pruned.blocks(1).ops_size(), 1);
-}
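The two deleted tests covered pruning across a recurrent sub_block: the first checked that an rnn_memory_helper op inside the sub-block survives when the recurrent op is a target, the second that an op writing to a feed var is not clipped. Conceptually, Prune walks backward from target ops and keeps every op whose outputs are still needed, stopping at feed vars; a toy sketch of that idea (not Paddle's actual Prune), with ops as plain dicts:

# Toy backward-reachability prune over ops given as plain dicts, e.g.
# {"inputs": ["a"], "outputs": ["b"], "is_target": False}.
def prune(ops, feed_vars):
    needed = set()
    kept = []
    for op in reversed(ops):
        if op["is_target"] or needed & set(op["outputs"]):
            kept.append(op)
            # Feed vars are supplied, so nothing upstream of them is needed.
            needed.update(v for v in op["inputs"] if v not in feed_vars)
    kept.reverse()
    return kept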
@@ -39,7 +39,6 @@ void analysis::DlnneSubgraphPass::InferShapeForDlnneMainGraph() const {
"fetch",
"recurrent",
"go",
"rnn_memory_helper_grad",
"conditional_block",
"while",
"send",
184 changes: 0 additions & 184 deletions paddle/fluid/operators/rnn_memory_helper_op.cc

This file was deleted.

1 change: 0 additions & 1 deletion paddle/fluid/operators/unity_build_rule.cmake
@@ -227,7 +227,6 @@ register_unity_group(
   reverse_op.cc)
 register_unity_group(
   cc
-  rnn_memory_helper_op.cc
   roi_align_op.cc
   roll_op.cc
   run_program_op.cc
13 changes: 2 additions & 11 deletions python/paddle/base/backward.py
@@ -1671,17 +1671,8 @@ def _append_backward_vars_(block, start_op_idx, grad_to_var, grad_info_map):
             or var in parent_op_vars
         ]
         if not existing_grad_var_ins:
-            '''
-            FIXME(paddle-dev, zengjinle): rnn_memory_helper_grad is used
-            in recurrent op. The input of this op does not even exist in
-            the program! Therefore, any dependency analysis would not
-            work to this op! If I do not add the following code, this op
-            would be pruned, and the calculation result would be wrong.
-            Maybe we should re-design this op later...
-            '''
-            if op_desc.type() not in ['rnn_memory_helper_grad']:
-                ops_to_remove.append(op_idx)
-                continue
+            ops_to_remove.append(op_idx)
+            continue

         # sum may create invalid variable, here to deal with it.
         if op_desc.type() == 'sum':
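With the rnn_memory_helper_grad exemption gone, the dead-grad-op sweep above becomes uniform: any grad op none of whose inputs exists as a grad variable (or in the parent block) is simply removed. A minimal sketch of the simplified logic, using a plain-dict data model rather than Paddle's OpDesc:

# Hypothetical sketch of the simplified sweep. Before this PR,
# rnn_memory_helper_grad had to be exempted here because its input
# could be absent from the program entirely.
def sweep_dead_grad_ops(grad_ops, existing_grad_vars):
    kept = []
    for op in grad_ops:
        if not any(v in existing_grad_vars for v in op["inputs"]):
            continue  # prune: none of its inputs exist, so it can never run
        kept.append(op)
        existing_grad_vars.update(op["outputs"])
    return kept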
1 change: 0 additions & 1 deletion python/paddle/base/framework.py
@@ -2879,7 +2879,6 @@ class Operator:
         'fetch',
         'recurrent',
         'go',
-        'rnn_memory_helper_grad',
         'conditional_block',
         'pylayer',
         'while',
1 change: 0 additions & 1 deletion test/ir/inference/program_config.py
@@ -113,7 +113,6 @@ def __repr__(self):
         'fetch',
         'recurrent',
         'go',
-        'rnn_memory_helper_grad',
         'conditional_block',
         'static_pylayer',
         'while',