
Merge pull request #2139 from himanshupathak21061998/add-computerror

Add ComputeError() function to the LARS class. Thanks!
walragatver committed Mar 21, 2020
2 parents 26e29f9 + 94f7ff3 commit bc4f4a12504f792a82226713da754911a0d2b913
Showing with 70 additions and 12 deletions.
  1. +3 −0 HISTORY.md
  2. +16 −1 src/mlpack/methods/lars/lars.cpp
  3. +18 −2 src/mlpack/methods/lars/lars.hpp
  4. +33 −9 src/mlpack/tests/lars_test.cpp
HISTORY.md
@@ -25,6 +25,9 @@
* Add functions to access parameters of `Convolution` and `AtrousConvolution`
layers (#1985).

* Add `ComputeError()` function to LARS regression, and change `Train()` to
return the computed error (#2139).

* Add Julia bindings (#1949). Build settings can be controlled with the
`BUILD_JULIA_BINDINGS=(ON/OFF)` and `JULIA_EXECUTABLE=/path/to/julia` CMake
parameters.
src/mlpack/methods/lars/lars.cpp
@@ -361,7 +361,7 @@ double LARS::Train(const arma::mat& matX,
beta = betaPath.back();

Timer::Stop("lars_regression");
- return maxCorr;
+ return ComputeError(matX, y, !transposeData);
}

double LARS::Train(const arma::mat& data,
@@ -538,3 +538,18 @@ void LARS::CholeskyDelete(const size_t colToKill)
matUtriCholFactor.shed_row(n);
}
}

double LARS::ComputeError(const arma::mat& matX,
const arma::rowvec& y,
const bool rowMajor)
{
// Sum of squared residuals of the final model in betaPath.
if (rowMajor)
{
return arma::accu(arma::pow(y - trans(matX * betaPath.back()), 2.0));
}
else
{
return arma::accu(arma::pow(y - betaPath.back().t() * matX, 2.0));
}
}
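
For reference, a minimal standalone Armadillo sketch of the quantity this function evaluates for column-major data: the sum of squared residuals ||y - beta^T * X||^2 of the final coefficient vector in betaPath. The SquaredError helper below is illustrative only and not part of this patch.

#include <armadillo>

// Sum of squared residuals ||y - beta^T * X||^2 for column-major X (one data
// point per column), mirroring the else-branch of LARS::ComputeError() above.
double SquaredError(const arma::mat& X,
                    const arma::rowvec& y,
                    const arma::vec& beta)
{
  const arma::rowvec residuals = y - beta.t() * X;
  return arma::accu(arma::square(residuals));
}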
src/mlpack/methods/lars/lars.hpp
@@ -183,7 +183,7 @@ class LARS
* @param responses A vector of targets.
* @param beta Vector to store the solution (the coefficients) in.
* @param transposeData Set to false if the data is row-major.
- * @return The final absolute maximum correlation.
+ * @return The squared error ||y - beta * X||^2 of the trained model on the data.
*/
double Train(const arma::mat& data,
const arma::rowvec& responses,
@@ -202,7 +202,7 @@ class LARS
* @param responses A vector of targets.
* @param transposeData Should be true if the input data is column-major and
* false otherwise.
- * @return The final absolute maximum correlation.
+ * @return The squared error ||y - beta * X||^2 of the trained model on the data.
*/
double Train(const arma::mat& data,
const arma::rowvec& responses,
@@ -244,6 +244,22 @@ class LARS
template<typename Archive>
void serialize(Archive& ar, const unsigned int /* version */);

/**
* Compute the squared error (||y - beta * X||^2) of the currently-trained
* LARS model on the given data.
*
* @param matX Column-major input data (or row-major input data if rowMajor =
* true).
* @param y A vector of targets.
* @param rowMajor Should be true if the data points matrix is row-major and
* false otherwise.
* @return The squared error of the model on the given data.
*/
double ComputeError(const arma::mat& matX,
const arma::rowvec& y,
const bool rowMajor = false);

private:
//! Gram matrix.
arma::mat matGramInternal;
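
As a usage sketch of the new interface: Train() now reports the squared training error, and ComputeError() recomputes the same quantity on demand. The random data, the lambda values, and the mlpack::regression namespace and header includes below are assumptions for illustration, not taken from the patch; the constructor and Train() signatures follow the tests below.

#include <mlpack/core.hpp>
#include <mlpack/methods/lars/lars.hpp>

using namespace mlpack::regression;

int main()
{
  // Column-major data: 10 dimensions, 100 points, with random responses.
  arma::mat X = arma::randu<arma::mat>(10, 100);
  arma::rowvec y = arma::randu<arma::rowvec>(100);

  LARS lars(true /* useCholesky */, 0.1 /* lambda1 */, 0.0 /* lambda2 */);
  arma::vec beta;

  // Train() now returns the squared error on the training data...
  const double trainError = lars.Train(X, y, beta);

  // ...which matches what ComputeError() reports for the same data
  // (the new test below makes the same exact-equality check).
  const double error = lars.ComputeError(X, y);

  return (trainError == error) ? 0 : 1;
}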
src/mlpack/tests/lars_test.cpp
@@ -353,7 +353,7 @@ BOOST_AUTO_TEST_CASE(TrainingConstructorWithNonDefaultsTest)
}

/**
- * Test that LARS::Train() returns finite correlation value.
+ * Test that LARS::Train() returns a finite error value.
*/
BOOST_AUTO_TEST_CASE(LARSTrainReturnCorrelation)
{
@@ -371,30 +371,54 @@ BOOST_AUTO_TEST_CASE(LARSTrainReturnCorrelation)
// Test with Cholesky decomposition and with lasso.
LARS lars1(true, lambda1, 0.0);
arma::vec betaOpt1;
- double maxCorr = lars1.Train(X, y, betaOpt1);
+ double error = lars1.Train(X, y, betaOpt1);

- BOOST_REQUIRE_EQUAL(std::isfinite(maxCorr), true);
+ BOOST_REQUIRE_EQUAL(std::isfinite(error), true);

// Test without Cholesky decomposition and with lasso.
LARS lars2(false, lambda1, 0.0);
arma::vec betaOpt2;
- maxCorr = lars2.Train(X, y, betaOpt2);
+ error = lars2.Train(X, y, betaOpt2);

- BOOST_REQUIRE_EQUAL(std::isfinite(maxCorr), true);
+ BOOST_REQUIRE_EQUAL(std::isfinite(error), true);

// Test with Cholesky decomposition and with elasticnet.
LARS lars3(true, lambda1, lambda2);
arma::vec betaOpt3;
- maxCorr = lars3.Train(X, y, betaOpt3);
+ error = lars3.Train(X, y, betaOpt3);

- BOOST_REQUIRE_EQUAL(std::isfinite(maxCorr), true);
+ BOOST_REQUIRE_EQUAL(std::isfinite(error), true);

// Test without Cholesky decomposition and with elasticnet.
LARS lars4(false, lambda1, lambda2);
arma::vec betaOpt4;
- maxCorr = lars4.Train(X, y, betaOpt4);
+ error = lars4.Train(X, y, betaOpt4);

- BOOST_REQUIRE_EQUAL(std::isfinite(maxCorr), true);
+ BOOST_REQUIRE_EQUAL(std::isfinite(error), true);
}

/**
* Test that LARS::ComputeError() returns an error value between 0 and 1 on
* this dataset, and that it matches the error returned by Train().
*/
BOOST_AUTO_TEST_CASE(LARSTestComputeError)
{
arma::mat X;
arma::mat Y;

data::Load("lars_dependent_x.csv", X);
data::Load("lars_dependent_y.csv", Y);

arma::rowvec y = Y.row(0);

LARS lars1(true, 0.1, 0.0);
arma::vec betaOpt1;
double train1 = lars1.Train(X, y, betaOpt1);
double cost = lars1.ComputeError(X, y);

BOOST_REQUIRE_LE(cost, 1);
BOOST_REQUIRE_GE(cost, 0);
BOOST_REQUIRE_EQUAL(cost, train1);
}

BOOST_AUTO_TEST_SUITE_END();
