Skip to content

Commit

Permalink
LARS float32 / floatmax support clean up
Browse files Browse the repository at this point in the history
Replacing if-SGERROR sets with REQUIRE statements

Fixing memory leaks / implicit type conversion warnings

Adding comment for LARS float support cleanup patch

LARS templated float support cleanup patch

LARS templated float support cleanup patch
  • Loading branch information
c4goldsw committed Aug 3, 2016
1 parent 35d5f1d commit ade10a6
Show file tree
Hide file tree
Showing 4 changed files with 49 additions and 37 deletions.
9 changes: 6 additions & 3 deletions src/shogun/labels/DenseLabels.h
Original file line number Diff line number Diff line change
Expand Up @@ -122,8 +122,11 @@ class CDenseLabels : public CLabels
template<typename ST>
ST get_label_t(int32_t idx)
{
REQUIRE(idx<get_num_labels(), "The provided index (%d) is out of bounds (the last label has index (%d)). "
"Please ensure that you're using a valid index number.", idx, get_num_labels())
REQUIRE(m_labels.vector, "You're attempting to get a label when there are in fact none! "
"Please ensure that you initialized the labels correctly.")
int32_t real_num=m_subset_stack->subset_idx_conversion(idx);
ASSERT(m_labels.vector && idx<get_num_labels())
return m_labels.vector[real_num];
}

Expand Down Expand Up @@ -156,7 +159,7 @@ class CDenseLabels : public CLabels
return get_labels_copy_t<ST>();

SGVector<ST> labels_copy(m_labels.vlen);
for(int i = 0; i < m_labels.vlen; ++i)
for(index_t i = 0; i < m_labels.vlen; ++i)
labels_copy[i] = (ST) m_labels[i];

return labels_copy;
Expand All @@ -174,7 +177,7 @@ class CDenseLabels : public CLabels
if (!m_subset_stack->has_subsets())
{
SGVector<ST> labels_copy(m_labels.vlen);
for(int i = 0; i < m_labels.vlen; ++i)
for(index_t i = 0; i < m_labels.vlen; ++i)
labels_copy[i] = (ST) m_labels[i];

return labels_copy;
Expand Down
36 changes: 15 additions & 21 deletions src/shogun/regression/LeastAngleRegression.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ void CLeastAngleRegression::find_max_abs(const std::vector<ST> &vec, const std::
{
imax = -1;
vmax = -1;
for (index_t i=0; i < vec.size(); ++i)
for (size_t i=0; i < vec.size(); ++i)
{
if (ignore_mask[i])
continue;
Expand Down Expand Up @@ -88,30 +88,23 @@ void CLeastAngleRegression::plane_rot(ST x0, ST x1,

bool CLeastAngleRegression::train_machine(CFeatures* data)
{
if (!m_labels)
SG_ERROR("No labels set\n")

if (m_labels->get_label_type() != LT_REGRESSION)
SG_ERROR("Provided labels (%s) are of type (%d) - they should be regression labels (%d) instead.\n",
m_labels->get_name(), m_labels->get_label_type(), LT_REGRESSION, m_labels->get_label_type())
REQUIRE(m_labels->get_label_type() == LT_REGRESSION, "Provided labels (%s) are of type (%d) - they should be regression labels (%d) instead.\n"
, m_labels->get_name(), m_labels->get_label_type(), LT_REGRESSION, m_labels->get_label_type())

if (!data)
{
if(!features)
SG_ERROR("No features provided.\n")

if(features->get_feature_class() != C_DENSE)
SG_ERROR("Feature-class (%d) must be of type C_DENSE (%d)\n", features->get_feature_class(), C_DENSE)
REQUIRE(features, "No features provided.\n")
REQUIRE(features->get_feature_class() == C_DENSE,
"Feature-class (%d) must be of type C_DENSE (%d)\n", features->get_feature_class(), C_DENSE)

data = features;
}
else
if (data->get_feature_class() != C_DENSE)
SG_ERROR("Feature-class (%d) must be of type C_DENSE (%d)\n", features->get_feature_class(), C_DENSE)
REQUIRE(data->get_feature_class() == C_DENSE,
"Feature-class must be of type C_DENSE (%d)\n", data->get_feature_class(), C_DENSE)

if (data->get_num_vectors() != m_labels->get_num_labels())
SG_ERROR("Number of training vectors (%d) does not match number of labels (%d)\n",
data->get_num_vectors(), m_labels->get_num_labels())
REQUIRE(data->get_num_vectors() == m_labels->get_num_labels(), "Number of training vectors (%d) does not match number of labels (%d)\n"
, data->get_num_vectors(), m_labels->get_num_labels())

//check for type of CFeatures, then call the appropriate template method
if(data->get_feature_type() == F_DREAL)
Expand All @@ -123,12 +116,13 @@ bool CLeastAngleRegression::train_machine(CFeatures* data)
else
SG_ERROR("Feature-type (%d) must be of type F_SHORTREAL (%d), F_DREAL (%d) or F_LONGREAL (%d).\n",
data->get_feature_type(), F_SHORTREAL, F_DREAL, F_LONGREAL)

return false;
}

template <typename ST>
bool CLeastAngleRegression::train_machine_templated(CDenseFeatures<ST> * data)
{

std::vector<std::vector<ST>> m_beta_path_t;

int32_t n_fea = data->get_num_features();
Expand Down Expand Up @@ -200,7 +194,7 @@ bool CLeastAngleRegression::train_machine_templated(CDenseFeatures<ST> * data)
map_corr = map_Xy - (map_Xr*map_mu);

// corr_sign = sign(corr)
for (index_t i=0; i < corr.size(); ++i)
for (size_t i=0; i < corr.size(); ++i)
corr_sign[i] = CMath::sign(corr[i]);

// find max absolute correlation in inactive set
Expand Down Expand Up @@ -351,10 +345,10 @@ bool CLeastAngleRegression::train_machine_templated(CDenseFeatures<ST> * data)
}

//copy m_beta_path_t (of type ST) into m_beta_path
for(index_t i = 0; i < m_beta_path_t.size(); ++i)
for(size_t i = 0; i < m_beta_path_t.size(); ++i)
{
std::vector<float64_t> va;
for(index_t p = 0; p < m_beta_path_t[i].size(); ++p){
for(size_t p = 0; p < m_beta_path_t[i].size(); ++p){
va.push_back((float64_t) m_beta_path_t[i][p]);
}
m_beta_path.push_back(va);
Expand Down
24 changes: 12 additions & 12 deletions src/shogun/regression/LeastAngleRegression.h
Original file line number Diff line number Diff line change
Expand Up @@ -124,10 +124,9 @@ class CLeastAngleRegression: public CLinearMachine

void switch_w(int32_t num_variable)
{
if (w.vlen <= 0)
SG_ERROR("Please train the model before updating its parameters")
if (size_t(num_variable) >= m_beta_idx.size() || num_variable < 0)
SG_ERROR("cannot switch to an estimator of %d non-zero coefficients", num_variable)
REQUIRE(w.vlen > 0,"Please train the model (i.e. run the model's train() method) before updating its weights.\n")
REQUIRE(size_t(num_variable) < m_beta_idx.size() && num_variable >= 0,
"Cannot switch to an estimator of %d non-zero coefficients.\n", num_variable)
if (w.vector == NULL)
w = SGVector<float64_t>(w.vlen);

Expand Down Expand Up @@ -188,13 +187,12 @@ class CLeastAngleRegression: public CLinearMachine

protected:
/**
* An interface method to - this is called by the superclass's (CLinearMachine)
* train() method. This method checks to see if data is a dense feature vector,
* and that it's elements are of type float64_t or float32_t. It then calls
* train_machine_templated with the appropriate template parameters (either float64_t or
* float32_t)
* @param data the data being passed to LARs to be trained on
* @see train
* An interface method used to call train_machine_templated -
* this is called by the superclass's train method (@see CLinearMachine::train).
* Checks to see if data is a dense feature vector,
* and that its elements are floating point types. It then calls
* train_machine_templated with the appropriate template parameters
* @param data training data
* @see train_machine_templated
*/
bool train_machine(CFeatures * data);
Expand All @@ -210,14 +208,16 @@ class CLeastAngleRegression: public CLinearMachine
static void plane_rot(ST x0, ST x1,
ST &y0, ST &y1, SGMatrix<ST> &G);

#ifndef SWIG
template <typename ST>
static void find_max_abs(const std::vector<ST> &vec, const std::vector<bool> &ignore_mask,
int32_t &imax, ST& vmax);
#endif

private:
/**
* A templated specialization of the train_machine method
* @param data the data being passed to LARs to be trained on
* @param data training data
* @see train_machine
*/
template <typename ST>
Expand Down
17 changes: 16 additions & 1 deletion tests/unit/regression/lars_unittest.cc
Original file line number Diff line number Diff line change
Expand Up @@ -252,8 +252,23 @@ void lars_n_less_than_d_feature_test_templated()
CRegressionLabels* labels=new CRegressionLabels(lab);
SG_REF(labels);
CLeastAngleRegression* lars=new CLeastAngleRegression(false);
SG_REF(lars)

lars->set_labels(labels);
lars->train(features);

//Catch exceptions thrown when training, clean up
try
{
lars->train(features);
}
catch(...)
{
SG_UNREF(lars);
SG_UNREF(features);
SG_UNREF(labels);

throw;
}

SGVector<float64_t> active2 = lars->get_w_for_var(2);
SGVector<float64_t> active1 = lars->get_w_for_var(1);
Expand Down

0 comments on commit ade10a6

Please sign in to comment.