add checkgradient and update passes
Manthan-R-Sheth committed Apr 3, 2018
1 parent d9eb640 commit dd5c8c8
Showing 2 changed files with 47 additions and 12 deletions.
17 changes: 5 additions & 12 deletions src/mlpack/methods/ann/layer/flexible_relu_impl.hpp
@@ -43,10 +43,7 @@ template<typename InputType, typename OutputType>
 void FlexibleReLU<InputDataType, OutputDataType>::Forward(
     const InputType&& input, OutputType&& output)
 {
-  int i = -1;
-  output = arma::zeros<InputType>(input.n_rows, input.n_cols);
-  output.transform([input, &i, this](double val) { ++i;
-    return (std::max(input(i), 0.0) + alpha(0)); } );
+  output = arma::clamp(input, 0.0, DBL_MAX) + alpha(0);
 }

 template<typename InputDataType, typename OutputDataType>
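For reference, the one-line replacement relies on the identity max(0, x) = clamp(x, 0, +inf), so the whole forward pass f(x) = max(0, x) + alpha can be written without the per-element lambda. A minimal standalone sketch of that equivalence (plain Armadillo, outside mlpack; not part of the commit):

// Sketch only: old per-element formula vs. the new vectorized form of the
// FlexibleReLU forward pass, f(x) = max(0, x) + alpha.
#include <algorithm>
#include <armadillo>
#include <cfloat>
#include <iostream>

int main()
{
  const double alpha = 0.05;
  arma::mat input = arma::randn<arma::mat>(4, 3);

  // New form from the commit: clamp negatives to zero, then shift by alpha.
  arma::mat vectorized = arma::clamp(input, 0.0, DBL_MAX) + alpha;

  // Old form: element-wise max against zero.
  arma::mat elementwise = input;
  elementwise.for_each([&](double& v) { v = std::max(v, 0.0) + alpha; });

  // Prints 1: the two results agree.
  std::cout << arma::approx_equal(vectorized, elementwise, "absdiff", 1e-12)
      << std::endl;
  return 0;
}

Besides being shorter, the vectorized form avoids capturing this and the mutable counter i in a lambda.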
@@ -56,11 +53,8 @@ void FlexibleReLU<InputDataType, OutputDataType>::Backward(
 {
   DataType derivative;
   //! Compute the first derivative of FlexibleReLU function.
-  derivative.set_size(input.n_rows, input.n_cols);
-  int i = -1;
-  derivative.transform([input, &i](double val) { ++i;
-    return (input(i) > 0? 1 : 0); } );
-
+  derivative = arma::sign(input);
+  derivative.elem(arma::find(derivative < 0.0)) += 1;
   g = gy % derivative;
 }
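The two added lines build the step-function derivative, f'(x) = 1 for x > 0 and 0 otherwise, without an index-tracking lambda: arma::sign yields -1/0/+1, and adding 1 to the negative entries maps -1 to 0. A small sketch of that construction (plain Armadillo; not part of the commit):

// Sketch only: derivative of f(x) = max(0, x) + alpha with respect to x,
// built the same way as the two added lines above.
#include <armadillo>
#include <iostream>

int main()
{
  arma::mat input("-2.0 0.0 3.5; 1.0 -0.5 0.0");

  arma::mat derivative = arma::sign(input);            // Entries become -1, 0, or +1.
  derivative.elem(arma::find(derivative < 0.0)) += 1;  // Map -1 to 0.

  // derivative is now 1 where input > 0 and 0 where input <= 0.
  derivative.print("derivative");
  return 0;
}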

@@ -74,9 +68,8 @@ void FlexibleReLU<InputDataType, OutputDataType>::Gradient(
   {
     gradient = arma::zeros<arma::Mat<eT>>(1, 1);
   }

-  arma::mat zeros = arma::zeros<arma::Mat<eT>>(input.n_rows, input.n_cols);
-  gradient(0) = arma::accu(error % arma::min(zeros, input)) / input.n_cols;
+  gradient(0) = arma::accu(error % arma::clamp(input, -DBL_MAX, 0.0))
+      / input.n_cols;
 }
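The rewritten gradient line is behaviour-preserving: arma::min(zeros, input) and arma::clamp(input, -DBL_MAX, 0.0) both evaluate to the element-wise negative part min(0, x), but the clamp form skips the temporary zero matrix. A standalone sketch of that equivalence (plain Armadillo; not part of the commit):

// Sketch only: min(0, x) written two ways; both give the negative part of x,
// so the accumulated gradient value is unchanged.
#include <armadillo>
#include <cfloat>
#include <cmath>
#include <iostream>

int main()
{
  arma::mat input = arma::randn<arma::mat>(3, 4);
  arma::mat error = arma::randn<arma::mat>(3, 4);

  // Old form: explicit zero matrix plus element-wise min.
  arma::mat zeros = arma::zeros<arma::mat>(input.n_rows, input.n_cols);
  double oldForm = arma::accu(error % arma::min(zeros, input)) / input.n_cols;

  // New form: clamp away the positive entries instead.
  double newForm = arma::accu(error % arma::clamp(input, -DBL_MAX, 0.0))
      / input.n_cols;

  // Prints 1: both expressions evaluate to the same value.
  std::cout << (std::abs(oldForm - newForm) < 1e-12) << std::endl;
  return 0;
}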


42 changes: 42 additions & 0 deletions src/mlpack/tests/ann_layer_test.cpp
@@ -631,6 +631,48 @@ BOOST_AUTO_TEST_CASE(JacobianFlexibleReLULayerTest)
   }
 }

+/**
+ * Flexible ReLU layer numerical gradient test.
+ */
+BOOST_AUTO_TEST_CASE(GradientFlexibleReLULayerTest)
+{
+  // Add function gradient instantiation.
+  struct GradientFunction
+  {
+    GradientFunction()
+    {
+      input = arma::randn(10, 1);
+      target = arma::mat("1");
+
+      model = new FFN<NegativeLogLikelihood<>, NguyenWidrowInitialization>(
+          input, target);
+      model->Add<Linear<> >(10, 2);
+      model->Add<FlexibleReLU<> >(0.05);
+      model->Add<LogSoftMax<> >();
+    }
+
+    ~GradientFunction()
+    {
+      delete model;
+    }
+
+    double Gradient(arma::mat& gradient) const
+    {
+      arma::mat output;
+      double error = model->Evaluate(model->Parameters(), 0, 1);
+      model->Gradient(model->Parameters(), 0, gradient, 1);
+      return error;
+    }
+
+    arma::mat& Parameters() { return model->Parameters(); }
+
+    FFN<NegativeLogLikelihood<>, NguyenWidrowInitialization>* model;
+    arma::mat input, target;
+  } function;
+
+  BOOST_REQUIRE_LE(CheckGradient(function), 1e-4);
+}
+
 /**
  * Jacobian MultiplyConstant module test.
  */
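The new test feeds the GradientFunction object to CheckGradient, the mlpack test helper that compares the analytic gradient produced by model->Gradient() against a finite-difference estimate and returns a difference measure, which the assertion requires to stay below 1e-4. A rough sketch of that idea is below; NumericalGradientCheck is a hypothetical stand-in, not mlpack's actual implementation, and the real helper's return value and tolerance handling may differ.

// Sketch only: central-difference gradient check in the spirit of CheckGradient.
// NumericalGradientCheck is a hypothetical helper, not mlpack's implementation.
#include <algorithm>
#include <armadillo>

template<typename FunctionType>
double NumericalGradientCheck(FunctionType& function, const double eps = 1e-6)
{
  // Analytic gradient at the current parameters.
  arma::mat analytic;
  function.Gradient(analytic);

  arma::mat& params = function.Parameters();
  arma::mat numeric(params.n_rows, params.n_cols);
  arma::mat unused;

  for (arma::uword i = 0; i < params.n_elem; ++i)
  {
    const double original = params(i);

    // Perturb one parameter in each direction and record the loss.
    params(i) = original + eps;
    const double lossPlus = function.Gradient(unused);

    params(i) = original - eps;
    const double lossMinus = function.Gradient(unused);

    params(i) = original;
    numeric(i) = (lossPlus - lossMinus) / (2.0 * eps);
  }

  // Relative difference between the analytic and numerical gradients
  // (illustrative; the real CheckGradient may compute this differently).
  return arma::norm(analytic - numeric, "fro") /
      std::max(arma::norm(analytic + numeric, "fro"), 1e-12);
}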
