From ed1647a85bb7fb5c319847f199a95ac79e1ddd24 Mon Sep 17 00:00:00 2001
From: Kexin Zhao
Date: Fri, 22 Jun 2018 11:57:15 -0700
Subject: [PATCH 1/7] rewrite mean as function

---
 src/function.h   | 53 +++++++++++++++++++++++-------------------------
 src/test_tape.cc |  4 +---
 2 files changed, 26 insertions(+), 31 deletions(-)

diff --git a/src/function.h b/src/function.h
index 25d84af..336b97c 100644
--- a/src/function.h
+++ b/src/function.h
@@ -48,34 +48,6 @@ class Fill {
   const framework::AttributeMap attrs_;
 };
 
-class Mean {
- public:
-  VariableHandle operator()(VariableHandle var) {
-    VariableHandle out(new Variable("mean"));
-    get_global_tape().AddOp("mean", {{"X", {var}}}, {{"Out", {out}}}, {});
-    return out;
-  }
-};
-
-VariableHandle relu(VariableHandle x) {
-  VariableHandle out(new Variable("relu"));
-  get_global_tape().AddOp("relu", {{"X", {x}}}, {{"Out", {out}}}, {});
-  return out;
-}
-
-VariableHandle softmax(VariableHandle x) {
-  VariableHandle out(new Variable("softmax"));
-  get_global_tape().AddOp("softmax", {{"X", {x}}}, {{"Out", {out}}}, {});
-  return out;
-}
-
-VariableHandle cross_entropy(VariableHandle x, VariableHandle label) {
-  VariableHandle out(new Variable("cross_entropy"));
-  get_global_tape().AddOp(
-      "cross_entropy", {{"X", {x}}, {"Label", {label}}}, {{"Y", {out}}}, {});
-  return out;
-}
-
 class Linear {
  public:
   Linear(int in_dim, int out_dim, const std::string &act)
@@ -219,6 +191,31 @@ class SGD {
   VariableHandle learning_rate_;
 };
 
+VariableHandle mean(VariableHandle x) {
+  VariableHandle out(new Variable("mean"));
+  get_global_tape().AddOp("mean", {{"X", {x}}}, {{"Out", {out}}}, {});
+  return out;
+}
+
+VariableHandle relu(VariableHandle x) {
+  VariableHandle out(new Variable("relu"));
+  get_global_tape().AddOp("relu", {{"X", {x}}}, {{"Out", {out}}}, {});
+  return out;
+}
+
+VariableHandle softmax(VariableHandle x) {
+  VariableHandle out(new Variable("softmax"));
+  get_global_tape().AddOp("softmax", {{"X", {x}}}, {{"Out", {out}}}, {});
+  return out;
+}
+
+VariableHandle cross_entropy(VariableHandle x, VariableHandle label) {
+  VariableHandle out(new Variable("cross_entropy"));
+  get_global_tape().AddOp(
+      "cross_entropy", {{"X", {x}}, {"Label", {label}}}, {{"Y", {out}}}, {});
+  return out;
+}
+
 VariableHandle CreateRecordioFileReader(std::string filename,
                                         std::vector<int> shape_concat,
                                         std::vector<int> ranks,
diff --git a/src/test_tape.cc b/src/test_tape.cc
index 0d1086e..3cabd7d 100644
--- a/src/test_tape.cc
+++ b/src/test_tape.cc
@@ -19,9 +19,9 @@ using paddle::tape::VariableHandle;
 using paddle::tape::Variable;
 using paddle::tape::Linear;
 using paddle::tape::Convolution2D;
-using paddle::tape::Mean;
 using paddle::tape::SGD;
 using paddle::tape::Fill;
+using paddle::tape::mean;
 using paddle::tape::softmax;
 using paddle::tape::cross_entropy;
 using paddle::tape::reset_global_tape;
@@ -91,7 +91,6 @@ TEST(Tape, TestRelu) {
 TEST(Tape, TestConv) {
   Convolution2D conv1(3, 16, 3, "relu");
   Convolution2D conv2(16, 1, 3, "relu");
-  Mean mean;
 
   SGD sgd(0.001);
 
@@ -124,7 +123,6 @@ TEST(Tape, TestMLP) {
   Linear linear1(3, 3, "relu");
   Linear linear2(3, 3, "relu");
-  Mean mean;
 
   SGD sgd(0.001);
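With mean rewritten as a free function like relu and softmax, loss expressions now compose as plain calls that each record one op onto the global tape. A minimal usage sketch, assuming the tape API already present in this repo (reset_global_tape, get_global_tape, Backward) and a Fill instance named filler set up as in test_tape.cc:

    // Each call appends an op to the global tape; Backward appends the grad ops.
    reset_global_tape();
    VariableHandle x(new Variable("x"));
    filler(x);                          // fill x with data, as in the tests
    auto loss = mean(relu(x));          // relu then mean, recorded in order
    get_global_tape().Backward(loss);   // populates x->Grad()
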
From 9d4329971c064bd9a5ca644d7483abb53cc234d6 Mon Sep 17 00:00:00 2001
From: Kexin Zhao
Date: Fri, 22 Jun 2018 16:47:40 -0700
Subject: [PATCH 2/7] add dropout

---
 src/function.h   | 41 +++++++++++++++++++++++++++--------------
 src/test_tape.cc | 17 +++++++++++++++++
 2 files changed, 44 insertions(+), 14 deletions(-)

diff --git a/src/function.h b/src/function.h
index 469abbd..f7ca1ee 100644
--- a/src/function.h
+++ b/src/function.h
@@ -57,6 +57,22 @@ class Fill {
   const framework::AttributeMap attrs_;
 };
 
+void init_params(VariableHandle v,
+                 const std::string &initializer,
+                 const framework::AttributeMap &attrs) {
+  if (initializer == "fill_constant") {
+    // fill_constant is not OperatorWithKernel, so we can't add it to the tape
+    framework::OpDesc op_desc =
+        CreateOpDesc(initializer, {}, {{"Out", {v}}}, attrs);
+    ScopeWrapper scope({}, {{"Out", {v}}});
+    framework::OpRegistry::CreateOp(op_desc)->Run(scope, platform::CPUPlace());
+  } else {
+    Tape init_tape;
+    init_tape.AddOp(initializer, {}, {{"Out", {v}}}, attrs);
+    init_tape.Forward();
+  }
+}
+
 class Linear {
  public:
   Linear(int in_dim, int out_dim, const std::string &act)
@@ -188,20 +204,17 @@ class SGD {
   VariableHandle learning_rate_;
 };
 
-void init_params(VariableHandle v,
-                 const std::string &initializer,
-                 const framework::AttributeMap &attrs) {
-  if (initializer == "fill_constant") {
-    // fill_constant is not OperatorWithKernel, so we can't add it to the tape
-    framework::OpDesc op_desc =
-        CreateOpDesc(initializer, {}, {{"Out", {v}}}, attrs);
-    ScopeWrapper scope({}, {{"Out", {v}}});
-    framework::OpRegistry::CreateOp(op_desc)->Run(scope, platform::CPUPlace());
-  } else {
-    Tape init_tape;
-    init_tape.AddOp(initializer, {}, {{"Out", {v}}}, attrs);
-    init_tape.Forward();
-  }
+VariableHandle dropout(VariableHandle x) {
+  VariableHandle out(new Variable("dropout"));
+  VariableHandle mask(new Variable("mask"));
+  get_global_tape().AddOp("dropout",
+                          {{"X", {x}}},
+                          {{"Out", {out}}, {"Mask", {mask}}},
+                          {{"dropout_prob", .5f},
+                           {"is_test", false},
+                           {"fix_seed", false},
+                           {"seed", RandomSeed::GetRandomSeed()}});
+  return out;
 }
 
 VariableHandle mean(VariableHandle x) {
diff --git a/src/test_tape.cc b/src/test_tape.cc
index 88ff1e2..bb824e1 100644
--- a/src/test_tape.cc
+++ b/src/test_tape.cc
@@ -37,6 +37,23 @@ TEST(Tape, TestReader) {
   LOG(INFO) << *data_label;
 }
 
+TEST(Tape, TestDropout) {
+  std::string initializer = "fill_constant";
+  paddle::framework::AttributeMap attrs;
+  attrs["dtype"] = paddle::framework::proto::VarType::Type::VarType_Type_FP32;
+  attrs["shape"] = std::vector<int>{3, 3};
+  attrs["value"] = 1.0f;
+  Fill filler(initializer, attrs);
+
+  VariableHandle input(new Variable("input"));
+  filler(input);
+  auto loss = dropout(input);
+  LOG(INFO) << input->Value();
+  LOG(INFO) << loss->Value();
+
+  get_global_tape().Backward(loss);
+}
+
 TEST(Tape, TestRelu) {
   std::string initializer = "uniform_random";
   paddle::framework::AttributeMap attrs;
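Moving init_params above the layer classes is what lets their constructors call it; the function either runs fill_constant directly (it is not an OperatorWithKernel, so it cannot be taped) or replays the initializer on a throwaway tape. A sketch of initializing a single parameter this way, reusing the attribute keys the patch itself uses (the shape is an arbitrary example):

    // Fill a hypothetical 3-element bias with zeros, outside the global tape.
    VariableHandle b(new Variable("bias"));
    framework::AttributeMap attrs;
    attrs["dtype"] = paddle::framework::proto::VarType::Type::VarType_Type_FP32;
    attrs["shape"] = std::vector<int>{3};
    attrs["value"] = 0.0f;
    init_params(b, "fill_constant", attrs);  // runs the op directly, untaped
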
From 2f27dea83f86adbe3b4e64cc7643d19330bc5165 Mon Sep 17 00:00:00 2001
From: Kexin Zhao
Date: Sat, 23 Jun 2018 09:10:17 -0700
Subject: [PATCH 3/7] add pool2d and fix value() error

---
 src/function.h   | 17 +++++++++++++++++
 src/tape.cc      |  3 ++-
 src/test_tape.cc |  2 ++
 3 files changed, 21 insertions(+), 1 deletion(-)

diff --git a/src/function.h b/src/function.h
index f7ca1ee..e7ec67c 100644
--- a/src/function.h
+++ b/src/function.h
@@ -204,6 +204,23 @@ class SGD {
   VariableHandle learning_rate_;
 };
 
+VariableHandle pool2d(VariableHandle x) {
+  VariableHandle out(new Variable("pool2d"));
+  get_global_tape().AddOp("pool2d",
+                          {{"X", {x}}},
+                          {{"Out", {out}}},
+                          {{"pooling_type", "max"},
+                           {"ksize", std::vector<int>{2, 2}},
+                           {"global_pooling", false},
+                           {"strides", std::vector<int>{1, 1}},
+                           {"paddings", std::vector<int>{0, 0}},
+                           {"use_cudnn", false},
+                           {"ceil_mode", false},
+                           {"use_mkldnn", false},
+                           {"data_format", "AnyLayout"}});
+  return out;
+}
+
 VariableHandle dropout(VariableHandle x) {
   VariableHandle out(new Variable("dropout"));
   VariableHandle mask(new Variable("mask"));
diff --git a/src/tape.cc b/src/tape.cc
index 9cd7748..d08fcb4 100644
--- a/src/tape.cc
+++ b/src/tape.cc
@@ -118,14 +118,15 @@ void Tape::AddOp(const std::string &type,
                  const VariableHandleMap &in_vars,
                  VariableHandleMap out_vars,
                  const framework::AttributeMap &attrs) {
+  PADDLE_ENFORCE(!has_been_backwarded_);
   InferShapeAndVarType(type, in_vars, &out_vars, attrs);
   tape_.emplace_back(type, in_vars, out_vars, attrs);
 }
 
 void Tape::Forward() {
   LOG(INFO) << "Starting forward -------------------------";
-  PADDLE_ENFORCE(!has_been_backwarded_);
   while (current_position_ < tape_.size()) {
+    PADDLE_ENFORCE(!has_been_backwarded_);
     OpHandle &op = tape_[current_position_];
     framework::OpDesc op_desc =
         CreateOpDesc(op.type_, op.inputs_, op.outputs_, op.attrs_);
diff --git a/src/test_tape.cc b/src/test_tape.cc
index bb824e1..74368ae 100644
--- a/src/test_tape.cc
+++ b/src/test_tape.cc
@@ -21,6 +21,7 @@ using paddle::tape::Linear;
 using paddle::tape::Convolution2D;
 using paddle::tape::SGD;
 using paddle::tape::Fill;
+using paddle::tape::dropout;
 using paddle::tape::mean;
 using paddle::tape::softmax;
 using paddle::tape::cross_entropy;
@@ -52,6 +53,7 @@ TEST(Tape, TestDropout) {
   LOG(INFO) << loss->Value();
 
   get_global_tape().Backward(loss);
+  LOG(INFO) << input->Grad()->Value();
 }
 
 TEST(Tape, TestRelu) {

From ea6ff739e1ee4368568a09425a20cde1cf66c494 Mon Sep 17 00:00:00 2001
From: Kexin Zhao
Date: Sat, 23 Jun 2018 09:36:56 -0700
Subject: [PATCH 4/7] fix avg loss issue

---
 src/example/mnist/test_mnist.cc | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/src/example/mnist/test_mnist.cc b/src/example/mnist/test_mnist.cc
index 03401a1..dac5215 100644
--- a/src/example/mnist/test_mnist.cc
+++ b/src/example/mnist/test_mnist.cc
@@ -56,10 +56,11 @@ TEST(Mnist, TestCPU) {
 
     auto predict = softmax(linear3(linear2(linear1(data))));
     auto loss = mean(cross_entropy(predict, label));
-    if (i % print_step == 0) {
-      avg_loss +=
-          loss->Value().Get<paddle::framework::LoDTensor>().data<float>()[0];
-      LOG(INFO) << avg_loss;
+
+    avg_loss +=
+        loss->Value().Get<paddle::framework::LoDTensor>().data<float>()[0];
+    if ((i + 1) % print_step == 0) {
+      LOG(INFO) << avg_loss / print_step;
       avg_loss = 0;
     }
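Patch 3 moves the PADDLE_ENFORCE so that a tape which has been backwarded refuses further AddOp calls, and patch 4 makes the log report the mean loss over the last print_step iterations instead of a single sample. A sketch of the training-loop shape these two changes imply (model(), data, label, num_steps, and print_step stand in for the mnist example's setup):

    float avg_loss = 0;
    for (int i = 0; i < num_steps; ++i) {
      reset_global_tape();  // a backwarded tape rejects further AddOp calls
      auto loss = mean(cross_entropy(model(data), label));  // placeholder model()
      avg_loss +=
          loss->Value().Get<paddle::framework::LoDTensor>().data<float>()[0];
      if ((i + 1) % print_step == 0) {
        LOG(INFO) << avg_loss / print_step;  // mean loss over the window
        avg_loss = 0;
      }
      get_global_tape().Backward(loss);
    }
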
From 28ffa4237ab7b0de13855d17c21936c753a7694e Mon Sep 17 00:00:00 2001
From: Kexin Zhao
Date: Sat, 23 Jun 2018 13:08:31 -0700
Subject: [PATCH 5/7] start to add bn

---
 src/function.h   |  6 ++++--
 src/test_tape.cc | 20 ++++++++++++++++++++
 2 files changed, 24 insertions(+), 2 deletions(-)

diff --git a/src/function.h b/src/function.h
index db6f128..a121242 100644
--- a/src/function.h
+++ b/src/function.h
@@ -206,7 +206,7 @@ VariableHandle pool2d(VariableHandle x) {
   get_global_tape().AddOp("pool2d",
                           {{"X", {x}}},
                           {{"Out", {out}}},
-                          {{"pooling_type", "max"},
+                          {{"pooling_type", std::string("max")},
                            {"ksize", std::vector<int>{2, 2}},
                            {"global_pooling", false},
                            {"strides", std::vector<int>{1, 1}},
@@ -214,10 +214,12 @@ VariableHandle pool2d(VariableHandle x) {
                            {"use_cudnn", false},
                            {"ceil_mode", false},
                            {"use_mkldnn", false},
-                           {"data_format", "AnyLayout"}});
+                           {"data_format", std::string("AnyLayout")}});
   return out;
 }
 
+VariableHandle batchnorm(VariableHandle x) { VariableHandle }
+
 VariableHandle dropout(VariableHandle x) {
   VariableHandle out(new Variable("dropout"));
   VariableHandle mask(new Variable("mask"));
diff --git a/src/test_tape.cc b/src/test_tape.cc
index 4ce1df3..e22f111 100644
--- a/src/test_tape.cc
+++ b/src/test_tape.cc
@@ -50,6 +50,26 @@ TEST(Tape, TestDropout) {
   LOG(INFO) << input->Grad()->Value();
 }
 
+TEST(Tape, TestPool2d) {
+  std::string initializer = "uniform_random";
+  paddle::framework::AttributeMap attrs;
+  attrs["min"] = -1.0f;
+  attrs["max"] = 1.0f;
+  attrs["dtype"] = paddle::framework::proto::VarType::Type::VarType_Type_FP32;
+  attrs["seed"] = 123;
+  attrs["shape"] = std::vector<int>{1, 1, 3, 3};
+  Fill filler(initializer, attrs);
+
+  VariableHandle input(new Variable("input"));
+  filler(input);
+  auto loss = pool2d(input);
+  LOG(INFO) << input->Value();
+  LOG(INFO) << loss->Value();
+
+  get_global_tape().Backward(loss);
+  LOG(INFO) << input->Grad()->Value();
+}
+
 TEST(Tape, TestConv) {
   Convolution2D conv1(3, 16, 3, "relu");
   Convolution2D conv2(16, 1, 3, "relu");
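The std::string("max") wrapping in this patch works around attribute-type deduction: a bare string literal is a const char*, which (presumably because the attribute variant also admits bool, a standard conversion that beats std::string's user-defined one) can land in the wrong variant alternative. A two-line illustration of the pitfall:

    framework::AttributeMap attrs;
    attrs["pooling_type"] = "max";               // const char*: may not become a string attr
    attrs["pooling_type"] = std::string("max");  // unambiguous std::string attribute
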
From 69280e773b9df54a0e651b56a61e91fab639c552 Mon Sep 17 00:00:00 2001
From: Kexin Zhao
Date: Mon, 25 Jun 2018 15:50:19 -0700
Subject: [PATCH 6/7] add test

---
 src/function.h   | 60 ++++++++++++++++++++++++++++++++++++++++++++----
 src/test_tape.cc | 32 ++++++++++++++++++++++++++
 2 files changed, 88 insertions(+), 4 deletions(-)

diff --git a/src/function.h b/src/function.h
index a121242..8a469e9 100644
--- a/src/function.h
+++ b/src/function.h
@@ -201,6 +201,62 @@ class SGD {
   VariableHandle learning_rate_;
 };
 
+class BatchNorm {
+ public:
+  BatchNorm(int c_in, std::string act)
+      : scale_(new Variable("BatchNormScale")),
+        bias_(new Variable("BatchNormBias")),
+        mean_(new Variable("BatchNormMean")),
+        variance_(new Variable("BatchNormVariance")),
+        act_(act) {
+    // Use fill one to initialize scale and variance
+    framework::AttributeMap attrs;
+    attrs["dtype"] = paddle::framework::proto::VarType::Type::VarType_Type_FP32;
+    attrs["shape"] = std::vector<int>{c_in};
+    attrs["value"] = 1.0f;
+    init_params(scale_, "fill_constant", attrs);
+    init_params(variance_, "fill_constant", attrs);
+
+    // Use fill zero to initialize bias and mean
+    attrs["value"] = 0.0f;
+    init_params(bias_, "fill_constant", attrs);
+    init_params(mean_, "fill_constant", attrs);
+  }
+
+  VariableHandle operator()(VariableHandle x) {
+    VariableHandle pre_act(new Variable("batch_norm"));
+    VariableHandle tmp_mean(new Variable("tmp_mean"));
+    VariableHandle tmp_var(new Variable("tmp_var"));
+    get_global_tape().AddOp("batch_norm",
+                            {{"X", {x}},
+                             {"Scale", {scale_}},
+                             {"Bias", {bias_}},
+                             {"Mean", {mean_}},
+                             {"Variance", {variance_}}},
+                            {{"Y", {pre_act}},
+                             {"MeanOut", {mean_}},
+                             {"VarianceOut", {variance_}},
+                             {"SavedMean", {tmp_mean}},
+                             {"SavedVariance", {tmp_var}}},
+                            {});
+
+    VariableHandle post_act(new Variable("batch_norm"));
+    get_global_tape().AddOp(
+        act_, {{"X", {pre_act}}}, {{"Out", {post_act}}}, {});
+    return post_act;
+  }
+
+  // Only scale and bias need to be updated by SGD
+  std::vector<VariableHandle> Params() { return {scale_, bias_}; }
+
+ private:
+  VariableHandle scale_;
+  VariableHandle bias_;
+  VariableHandle mean_;
+  VariableHandle variance_;
+  std::string act_;
+};
+
 VariableHandle pool2d(VariableHandle x) {
   VariableHandle out(new Variable("pool2d"));
   get_global_tape().AddOp("pool2d",
@@ -211,15 +267,11 @@ VariableHandle pool2d(VariableHandle x) {
                            {"global_pooling", false},
                            {"strides", std::vector<int>{1, 1}},
                            {"paddings", std::vector<int>{0, 0}},
-                           {"use_cudnn", false},
                            {"ceil_mode", false},
-                           {"use_mkldnn", false},
                            {"data_format", std::string("AnyLayout")}});
   return out;
 }
 
-VariableHandle batchnorm(VariableHandle x) { VariableHandle }
-
 VariableHandle dropout(VariableHandle x) {
   VariableHandle out(new Variable("dropout"));
   VariableHandle mask(new Variable("mask"));
diff --git a/src/test_tape.cc b/src/test_tape.cc
index e22f111..ea13113 100644
--- a/src/test_tape.cc
+++ b/src/test_tape.cc
@@ -21,6 +21,7 @@ using paddle::tape::Linear;
 using paddle::tape::Convolution2D;
 using paddle::tape::SGD;
 using paddle::tape::Fill;
+using paddle::tape::BatchNorm;
 using paddle::tape::dropout;
 using paddle::tape::mean;
 using paddle::tape::softmax;
@@ -40,6 +41,7 @@ TEST(Tape, TestDropout) {
   attrs["shape"] = std::vector<int>{3, 3};
   Fill filler(initializer, attrs);
 
+  reset_global_tape();
   VariableHandle input(new Variable("input"));
   filler(input);
   auto loss = dropout(input);
@@ -60,6 +62,7 @@ TEST(Tape, TestPool2d) {
   attrs["shape"] = std::vector<int>{1, 1, 3, 3};
   Fill filler(initializer, attrs);
 
+  reset_global_tape();
   VariableHandle input(new Variable("input"));
   filler(input);
   auto loss = pool2d(input);
@@ -70,6 +73,35 @@ TEST(Tape, TestPool2d) {
   LOG(INFO) << input->Grad()->Value();
 }
 
+TEST(Tape, TestBatchNorm) {
+  BatchNorm bn(4, "relu");
+  SGD sgd(0.001);
+
+  std::string initializer = "uniform_random";
+  paddle::framework::AttributeMap attrs;
+  attrs["min"] = -1.0f;
+  attrs["max"] = 1.0f;
+  attrs["dtype"] = paddle::framework::proto::VarType::Type::VarType_Type_FP32;
+  attrs["seed"] = 123;
+  attrs["shape"] = std::vector<int>{32, 4, 8, 8};
+  Fill filler(initializer, attrs);
+
+  for (int i = 0; i < 2; ++i) {
+    reset_global_tape();
+
+    VariableHandle input(new Variable("input"));
+    filler(input);
+
+    auto loss = bn(input);
+
+    get_global_tape().Backward(loss);
+
+    for (auto w : bn.Params()) {
+      sgd.Update(w);
+    }
+  }
+}
+
 TEST(Tape, TestConv) {
   Convolution2D conv1(3, 16, 3, "relu");
   Convolution2D conv2(16, 1, 3, "relu");
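Note that BatchNorm wires mean_ and variance_ in as both inputs (Mean, Variance) and outputs (MeanOut, VarianceOut), so the running statistics are updated in place on every forward pass, while Params() exposes only scale_ and bias_ to the optimizer. A hypothetical composition of the pieces available at this point, following the fill-then-run pattern of the tests (filler is a Fill instance as above):

    Convolution2D conv(3, 16, 3, "relu");
    BatchNorm bn(16, "relu");
    reset_global_tape();
    VariableHandle img(new Variable("img"));
    filler(img);                               // e.g. uniform_random fill
    auto loss = mean(pool2d(bn(conv(img))));   // conv -> batch_norm -> max pool
    get_global_tape().Backward(loss);
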
From ea4485d11c8705520bad15c12cae1b7c7be346ad Mon Sep 17 00:00:00 2001
From: Kexin Zhao
Date: Mon, 25 Jun 2018 17:00:27 -0700
Subject: [PATCH 7/7] fix attr

---
 src/function.h | 60 ++++++++++++++++++++------------------------------
 1 file changed, 24 insertions(+), 36 deletions(-)

diff --git a/src/function.h b/src/function.h
index 8a469e9..6699aeb 100644
--- a/src/function.h
+++ b/src/function.h
@@ -93,17 +93,19 @@ class Linear {
     init_params(b_, "fill_constant", attrs);
   }
 
-  VariableHandle operator()(VariableHandle input) {
+  VariableHandle operator()(VariableHandle input,
+                            const framework::AttributeMap &mul_op_attrs = {},
+                            const framework::AttributeMap &add_op_attrs = {}) {
     VariableHandle pre_bias(new Variable("linear"));
     get_global_tape().AddOp("mul",
                             {{"X", {input}}, {"Y", {w_}}},
                             {{"Out", {pre_bias}}},
-                            {{"x_num_col_dims", 1}, {"y_num_col_dims", 1}});
+                            mul_op_attrs);
     VariableHandle pre_act(new Variable("linear"));
     get_global_tape().AddOp("elementwise_add",
                             {{"X", {pre_bias}}, {"Y", {b_}}},
                             {{"Out", {pre_act}}},
-                            {{"axis", 1}});
+                            add_op_attrs);
     VariableHandle post_act(new Variable("linear"));
     get_global_tape().AddOp(
         act_, {{"X", {pre_act}}}, {{"Out", {post_act}}}, {});
@@ -120,7 +122,7 @@ class Linear {
 
 class Convolution2D {
  public:
-  Convolution2D(int c_in, int c_out, int f, std::string act)
+  Convolution2D(int c_in, int c_out, int f, const std::string &act)
       : w_(new Variable("ConvolutionWeight")),
         b_(new Variable("ConvolutionBias")),
         act_(act) {
@@ -142,23 +144,20 @@ class Convolution2D {
     init_params(b_, "fill_constant", attrs);
   }
 
-  VariableHandle operator()(VariableHandle input) {
+  VariableHandle operator()(
+      VariableHandle input,
+      const framework::AttributeMap &conv_op_attrs = {},
+      const framework::AttributeMap &add_op_attrs = {{"axis", 1}}) {
     VariableHandle pre_bias(new Variable("conv"));
     get_global_tape().AddOp("conv2d",
                             {{"Input", {input}}, {"Filter", {w_}}},
                             {{"Output", {pre_bias}}},
-                            {{"strides", std::vector<int>{1, 1}},
-                             {"paddings", std::vector<int>{0, 0}},
-                             {"dilations", std::vector<int>{1, 1}},
-                             {"groups", 1},
-                             {"use_cudnn", false},
-                             {"use_mkldnn", false},
-                             {"data_format", std::string("AnyLayout")}});
+                            conv_op_attrs);
     VariableHandle pre_act(new Variable("conv"));
     get_global_tape().AddOp("elementwise_add",
                             {{"X", {pre_bias}}, {"Y", {b_}}},
                             {{"Out", {pre_act}}},
-                            {{"axis", 1}});
+                            add_op_attrs);
     VariableHandle post_act(new Variable("conv"));
     get_global_tape().AddOp(
         act_, {{"X", {pre_act}}}, {{"Out", {post_act}}}, {});
@@ -203,7 +202,7 @@ class SGD {
 
 class BatchNorm {
  public:
-  BatchNorm(int c_in, std::string act)
+  BatchNorm(int channel_in, const std::string &act)
      : scale_(new Variable("BatchNormScale")),
        bias_(new Variable("BatchNormBias")),
        mean_(new Variable("BatchNormMean")),
@@ -212,7 +211,7 @@ class BatchNorm {
     // Use fill one to initialize scale and variance
     framework::AttributeMap attrs;
     attrs["dtype"] = paddle::framework::proto::VarType::Type::VarType_Type_FP32;
-    attrs["shape"] = std::vector<int>{c_in};
+    attrs["shape"] = std::vector<int>{channel_in};
     attrs["value"] = 1.0f;
     init_params(scale_, "fill_constant", attrs);
     init_params(variance_, "fill_constant", attrs);
@@ -223,7 +222,8 @@ class BatchNorm {
     init_params(bias_, "fill_constant", attrs);
     init_params(mean_, "fill_constant", attrs);
   }
-  VariableHandle operator()(VariableHandle x) {
+  VariableHandle operator()(VariableHandle x,
+                            const framework::AttributeMap &attrs = {}) {
     VariableHandle pre_act(new Variable("batch_norm"));
     VariableHandle tmp_mean(new Variable("tmp_mean"));
     VariableHandle tmp_var(new Variable("tmp_var"));
@@ -238,7 +238,7 @@ class BatchNorm {
                              {"VarianceOut", {variance_}},
                              {"SavedMean", {tmp_mean}},
                              {"SavedVariance", {tmp_var}}},
-                            {});
+                            attrs);
 
     VariableHandle post_act(new Variable("batch_norm"));
     get_global_tape().AddOp(
@@ -257,31 +257,19 @@ class BatchNorm {
   std::string act_;
 };
 
-VariableHandle pool2d(VariableHandle x) {
+VariableHandle pool2d(VariableHandle x,
+                      const framework::AttributeMap &attrs = {}) {
   VariableHandle out(new Variable("pool2d"));
-  get_global_tape().AddOp("pool2d",
-                          {{"X", {x}}},
-                          {{"Out", {out}}},
-                          {{"pooling_type", std::string("max")},
-                           {"ksize", std::vector<int>{2, 2}},
-                           {"global_pooling", false},
-                           {"strides", std::vector<int>{1, 1}},
-                           {"paddings", std::vector<int>{0, 0}},
-                           {"ceil_mode", false},
-                           {"data_format", std::string("AnyLayout")}});
+  get_global_tape().AddOp("pool2d", {{"X", {x}}}, {{"Out", {out}}}, attrs);
   return out;
 }
 
-VariableHandle dropout(VariableHandle x) {
+VariableHandle dropout(VariableHandle x,
+                       const framework::AttributeMap &attrs = {}) {
   VariableHandle out(new Variable("dropout"));
   VariableHandle mask(new Variable("mask"));
-  get_global_tape().AddOp("dropout",
-                          {{"X", {x}}},
-                          {{"Out", {out}}, {"Mask", {mask}}},
-                          {{"dropout_prob", .5f},
-                           {"is_test", false},
-                           {"fix_seed", false},
-                           {"seed", RandomSeed::GetRandomSeed()}});
+  get_global_tape().AddOp(
+      "dropout", {{"X", {x}}}, {{"Out", {out}}, {"Mask", {mask}}}, attrs);
  return out;
 }
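After this final patch the hard-coded operator attributes are gone: Linear, Convolution2D, BatchNorm, pool2d, and dropout all accept a caller-supplied framework::AttributeMap with a default, so configuration lives at the call site and an empty map falls through to each op's own defaults. A sketch of the new calling convention, reusing attribute keys from the formerly hard-coded maps (input is a filled VariableHandle as in the tests):

    framework::AttributeMap pool_attrs;
    pool_attrs["pooling_type"] = std::string("max");
    pool_attrs["ksize"] = std::vector<int>{2, 2};
    auto pooled = pool2d(input, pool_attrs);  // explicit attrs for this call
    auto dropped = dropout(input);            // empty map: rely on op defaults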