
Commit 7aba63a
Fix lint errors

bgawrych committed May 14, 2020
1 parent f9472e7
Showing 3 changed files with 16 additions and 7 deletions.
4 changes: 2 additions & 2 deletions src/operator/nn/log_softmax.cc
@@ -65,8 +65,8 @@ static void LogSoftmaxGradComputeExCPU(const nnvm::NodeAttrs& attrs,
     MKLDNN_OPCHECK_RUN(fn, attrs, ctx, inputs, req, outputs);
     return;
   }
-  FallBackCompute(SoftmaxGradCompute<cpu, op::mshadow_op::left, mxnet_op::log_softmax_bwd>, attrs, ctx,
-                  inputs, req, outputs);
+  FallBackCompute(SoftmaxGradCompute<cpu, op::mshadow_op::left, mxnet_op::log_softmax_bwd>,
+                  attrs, ctx, inputs, req, outputs);
 }

 inline static bool LogSoftmaxStorageType(const nnvm::NodeAttrs& attrs,
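(Context, not part of the diff.) The hunk above follows MXNet's usual MKLDNN dispatch shape: run the accelerated primitive and return when the inputs qualify, otherwise delegate to the templated CPU kernel through FallBackCompute. A minimal, self-contained sketch of that control flow — Kernel and Dispatch are hypothetical stand-ins for the real operator machinery:

    #include <functional>

    using Kernel = std::function<void()>;

    // Hypothetical stand-in for the dispatch above. The real FallBackCompute
    // also converts any MKLDNN-layout inputs back to the default layout
    // before invoking the CPU kernel.
    void Dispatch(bool supports_mkldnn,
                  const Kernel &mkldnn_kernel,
                  const Kernel &cpu_kernel) {
      if (supports_mkldnn) {
        // In the real code, MKLDNN_OPCHECK_RUN can additionally cross-check
        // the result against the fallback when MKLDNN debugging is enabled.
        mkldnn_kernel();
        return;
      }
      cpu_kernel();  // SoftmaxGradCompute<cpu, ...> in the real code
    }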
3 changes: 2 additions & 1 deletion src/operator/nn/mkldnn/mkldnn_base-inl.h
@@ -211,7 +211,8 @@ bool SupportQuantizedMKLDNNAct(const ActivationParam &param);
 bool SupportMKLDNNConv(const ConvolutionParam &params, const NDArray &input);
 bool SupportMKLDNNDeconv(const DeconvolutionParam& params, const NDArray &input);
 bool SupportMKLDNNSoftmax(const SoftmaxParam& param, const NDArray &input, const NDArray &output);
-bool SupportMKLDNNLogSoftmax(const SoftmaxParam& param, const NDArray &input, const NDArray &output);
+bool SupportMKLDNNLogSoftmax(const SoftmaxParam& param, const NDArray &input,
+                             const NDArray &output);
 bool SupportMKLDNNSoftmaxOutput(const SoftmaxOutputParam &param);
 bool SupportMKLDNNTranspose(const TransposeParam& param, const NDArray &data);
 }  // namespace op
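(Context, not part of the diff.) These Support* declarations are the cheap capability checks that dispatch code such as LogSoftmaxGradComputeExCPU consults before taking the MKLDNN path. A hypothetical, self-contained sketch of the shape such a predicate typically takes — the Tensor struct and the exact dtype/rank conditions are illustrative assumptions, not the repository's implementation:

    // Illustrative stand-in for mxnet::NDArray: just the fields a predicate
    // would inspect.
    struct Tensor {
      int dtype;  // assume 0 means float32
      int ndim;
    };

    // Hypothetical predicate: MKLDNN kernels commonly restrict element type
    // and tensor rank; anything unsupported takes the CPU fallback instead.
    bool SupportLogSoftmaxSketch(const Tensor &input) {
      const int kFloat32 = 0;
      return input.dtype == kFloat32 && input.ndim >= 1 && input.ndim <= 4;
    }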
16 changes: 12 additions & 4 deletions src/operator/nn/mkldnn/mkldnn_log_softmax.cc
@@ -104,9 +104,13 @@ static MKLDNNLogSoftmaxFwd &GetLogSoftmaxFwd(const SoftmaxParam &param,
                                              const NDArray &data,
                                              const NDArray &output) {
 #if DMLC_CXX11_THREAD_LOCAL
-  static thread_local std::unordered_map<MKLDNNSoftmaxSignature, MKLDNNLogSoftmaxFwd, OpHash> fwds;
+  static thread_local std::unordered_map<MKLDNNSoftmaxSignature,
+                                         MKLDNNLogSoftmaxFwd,
+                                         OpHash> fwds;
 #else
-  static MX_THREAD_LOCAL std::unordered_map<MKLDNNSoftmaxSignature, MKLDNNLogSoftmaxFwd, OpHash> fwds;
+  static MX_THREAD_LOCAL std::unordered_map<MKLDNNSoftmaxSignature,
+                                            MKLDNNLogSoftmaxFwd,
+                                            OpHash> fwds;
 #endif

   MKLDNNSoftmaxSignature key(param);
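(Context, not part of the diff.) GetLogSoftmaxFwd and GetLogSoftmaxBwd (below) share the same memoization idiom: MKLDNN primitives are expensive to build, so one instance per unique operator signature is kept in a thread_local map, avoiding any locking. A minimal, self-contained sketch of the idiom — Signature, SigHash, and Primitive are hypothetical stand-ins for MKLDNNSoftmaxSignature, OpHash, and the primitive classes:

    #include <cstddef>
    #include <functional>
    #include <unordered_map>

    struct Signature {                   // stand-in for MKLDNNSoftmaxSignature
      int axis;
      bool operator==(const Signature &o) const { return axis == o.axis; }
    };

    struct SigHash {                     // stand-in for OpHash
      std::size_t operator()(const Signature &s) const {
        return std::hash<int>()(s.axis);
      }
    };

    struct Primitive {                   // stand-in for MKLDNNLogSoftmaxFwd/Bwd
      explicit Primitive(const Signature &) { /* expensive one-time setup */ }
    };

    Primitive &GetPrimitive(const Signature &key) {
      // One cache per thread: lookups need no mutex, at the cost of building
      // the primitive once per thread that uses it.
      static thread_local std::unordered_map<Signature, Primitive, SigHash> cache;
      auto it = cache.find(key);
      if (it == cache.end())
        it = cache.emplace(key, Primitive(key)).first;
      return it->second;
    }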
@@ -168,9 +172,13 @@ static MKLDNNLogSoftmaxBwd &GetLogSoftmaxBwd(const SoftmaxParam &param,
                                              const std::vector<NDArray> &data,
                                              const std::vector<NDArray> &output) {
 #if DMLC_CXX11_THREAD_LOCAL
-  static thread_local std::unordered_map<MKLDNNSoftmaxSignature, MKLDNNLogSoftmaxBwd, OpHash> bwds;
+  static thread_local std::unordered_map<MKLDNNSoftmaxSignature,
+                                         MKLDNNLogSoftmaxBwd,
+                                         OpHash> bwds;
 #else
-  static MX_THREAD_LOCAL std::unordered_map<MKLDNNSoftmaxSignature, MKLDNNLogSoftmaxBwd, OpHash> bwds;
+  static MX_THREAD_LOCAL std::unordered_map<MKLDNNSoftmaxSignature,
+                                            MKLDNNLogSoftmaxBwd,
+                                            OpHash> bwds;
 #endif

   MKLDNNSoftmaxSignature key(param);
