From 3769be57e8ab5a57648f021626e9bd46e7887fba Mon Sep 17 00:00:00 2001
From: "Li, Hao H"
Date: Fri, 19 Apr 2019 13:22:58 +0800
Subject: [PATCH] test compile

---
 src/operator/rnn.cc | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/operator/rnn.cc b/src/operator/rnn.cc
index ce58c89bcd4c..85b439dd3274 100644
--- a/src/operator/rnn.cc
+++ b/src/operator/rnn.cc
@@ -144,11 +144,11 @@ inline static bool RNNStorageType(const nnvm::NodeAttrs& attrs,
                                   std::vector *in_attrs,
                                   std::vector *out_attrs) {
   DispatchMode wanted_mode = DispatchMode::kFCompute;
-/*
+
 #if MXNET_USE_MKLDNN == 1
   wanted_mode = DispatchMode::kFComputeEx;
 #endif
-*/
+
   return storage_type_assign(out_attrs, mxnet::kDefaultStorage,
                              dispatch_mode, wanted_mode);
 }
@@ -259,6 +259,7 @@ The definition of GRU here is slightly different from paper but compatible with
 .set_attr("FCreateOpState", CreateRNNState)
 .set_attr("FStatefulCompute", RNNStatefulCompute)
 #if MXNET_USE_MKLDNN == 1
+.set_attr("TIsMKLDNN", true)
 .set_attr("FStatefulComputeEx", RNNStatefulComputeCPU)
 #endif
 .set_attr("FGradient", RNNGrad{"_backward_RNN"})
@@ -294,6 +295,7 @@ NNVM_REGISTER_OP(_backward_RNN)
 .set_attr("TIsLayerOpBackward", true)
 .set_attr("TIsBackward", true)
 #if MXNET_USE_MKLDNN == 1
+.set_attr("TIsMKLDNN", true)
 .set_attr("FStatefulComputeEx", RNNStatefulGradComputeCPU)
 #endif
 .set_attr("FStatefulCompute", RNNStatefulGradCompute);