Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit

Permalink
Get rid of older code
Browse files · Browse the repository at this point in the history
  • Loading branch information
Hao Jin committed Mar 12, 2018
1 parent 700f9bf commit a60d44b
Showing 1 changed file with 0 additions and 2 deletions.
2 changes (0 additions, 2 deletions) in src/operator/l2_normalization-inl.h
Expand Up @@ -95,7 +95,6 @@ class L2NormalizationOp : public Operator {
.get_with_shape<xpu, 2, DType>(dshape, s);
Tensor<xpu, 1, DType> norm = out_data[l2_normalization::kNorm].get<xpu, 1, DType>(s);
norm = sumall_except_dim<0>(F<mxnet::op::mshadow_op::square>(data));
// norm = F<mxnet::op::mshadow_op::square_root>(norm + DType(param_.eps));
MXNET_ASSIGN_REQ_SWITCH(req[0], Req, {
mxnet_op::Kernel<mxnet_op::op_with_req<mxnet::op::mshadow_op::plus, Req>, xpu>::Launch(
s, norm.size(0), norm.dptr_, norm.dptr_, DType(param_.eps));
Expand All @@ -114,7 +113,6 @@ class L2NormalizationOp : public Operator {
Tensor<xpu, 2, DType> norm = out_data[l2_normalization::kNorm]
.get_with_shape<xpu, 2, DType>(norm_shape, s);
norm = reduce_with_axis<red::sum, false>(F<mxnet::op::mshadow_op::square>(data), 1);
// norm = F<mxnet::op::mshadow_op::square_root>(norm + DType(param_.eps));
MXNET_ASSIGN_REQ_SWITCH(req[0], Req, {
mxnet_op::Kernel<mxnet_op::op_with_req<mxnet::op::mshadow_op::plus, Req>, xpu>::Launch(
s, norm.size(0) * norm.size(1), norm.dptr_, norm.dptr_, DType(param_.eps));
Expand Down

0 comments on commit a60d44b

Please sign in to comment.