Use get and set in regression classes #3734

Merged · 1 commit · Mar 24, 2017
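This PR replaces direct reads and writes of protected members (m_tau, m_epsilon, m_max_l1_norm, m_use_bias, m_C, m_tube_epsilon, and friends) with their accessor methods. The headers are not part of this diff, so the sketch below is only an assumption of what the accessor pairs look like, modeled on the set_tau/get_tau calls that appear in the changes; the actual declarations may differ.

// Minimal, self-contained sketch (hypothetical class name) of the
// accessor pattern this diff relies on: trivial inline getters/setters
// over members that were previously touched directly. float64_t is
// shogun's typedef for double; plain double stands in here.
class CRegressionParamsSketch
{
public:
	void set_tau(double tau) { m_tau = tau; }
	double get_tau() const { return m_tau; }

	void set_epsilon(double epsilon) { m_epsilon = epsilon; }
	double get_epsilon() const { return m_epsilon; }

private:
	double m_tau = 1e-6;      // regularization strength (default mirrors init())
	double m_epsilon = 1e-4;  // tolerance (default mirrors init())
};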
6 changes: 3 additions & 3 deletions src/shogun/regression/KernelRidgeRegression.cpp
@@ -30,15 +30,15 @@ CKernelRidgeRegression::CKernelRidgeRegression(float64_t tau, CKernel* k, CLabel
{
init();

-m_tau=tau;
+set_tau(tau);
set_labels(lab);
set_kernel(k);
}

void CKernelRidgeRegression::init()
{
-m_tau=1e-6;
-m_epsilon=0.0001;
+set_tau(1e-6);
+set_epsilon(0.0001);
SG_ADD(&m_tau, "tau", "Regularization parameter", MS_AVAILABLE);
}
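Routing the defaults in init() through set_tau/set_epsilon, instead of assigning m_tau/m_epsilon directly, means any validation the setter performs also applies to the defaults. The setters introduced by this PR may be plain assignments; the sketch below only illustrates the kind of guard a setter makes possible. In shogun the check would typically use the REQUIRE macro; std::invalid_argument stands in to keep the sketch self-contained.

#include <stdexcept>

// Hypothetical validating setter (not taken from this PR): reject a
// non-positive regularization parameter up front rather than failing
// later inside training.
struct RidgeParamsSketch
{
	double m_tau = 1e-6;

	void set_tau(double tau)
	{
		if (tau <= 0)
			throw std::invalid_argument("Regularization parameter tau must be positive");
		m_tau = tau;
	}
};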

19 changes: 10 additions & 9 deletions src/shogun/regression/LeastAngleRegression.cpp
@@ -28,7 +28,7 @@ CLeastAngleRegression::CLeastAngleRegression(bool lasso) :
CLinearMachine(), m_lasso(lasso),
m_max_nonz(0), m_max_l1_norm(0)
{
-m_epsilon = CMath::MACHINE_EPSILON;
+set_epsilon(CMath::MACHINE_EPSILON);
SG_ADD(&m_epsilon, "epsilon", "Epsilon for early stopping", MS_AVAILABLE);
SG_ADD(&m_max_nonz, "max_nonz", "Max number of non-zero variables", MS_AVAILABLE);
SG_ADD(&m_max_l1_norm, "max_l1_norm", "Max l1-norm of estimator", MS_AVAILABLE);
@@ -185,7 +185,7 @@ bool CLeastAngleRegression::train_machine_templated(CDenseFeatures<ST> * data)
// main loop
//========================================
int32_t nloop=0;
-while (m_num_active < max_active_allowed && max_corr/n_vec > m_epsilon && !stop_cond)
+while (m_num_active < max_active_allowed && max_corr/n_vec > get_epsilon() && !stop_cond)
{
// corr = X' * (y-mu) = - X'*mu + Xy
typename SGVector<ST>::EigenVectorXtMap map_corr(&corr[0], n_fea);
@@ -298,17 +298,18 @@ bool CLeastAngleRegression::train_machine_templated(CDenseFeatures<ST> * data)
// update estimator
for (index_t i=0; i < m_num_active; ++i)
beta[m_active_set[i]] += gamma * wA(i);

// early stopping on max l1-norm
-if (m_max_l1_norm > 0)
+if (get_max_l1_norm() > 0)
{
ST l1 = SGVector<ST>::onenorm(&beta[0], n_fea);
-if (l1 > m_max_l1_norm)
+if (l1 > get_max_l1_norm())
{
// stopping with interpolated beta
stop_cond = true;
lasso_cond = false;
ST l1_prev = (ST) SGVector<ST>::onenorm(&m_beta_path_t[nloop][0], n_fea);
-ST s = (m_max_l1_norm-l1_prev)/(l1-l1_prev);
+ST s = (get_max_l1_norm()-l1_prev)/(l1-l1_prev);

typename SGVector<ST>::EigenVectorXtMap map_beta(&beta[0], n_fea);
typename SGVector<ST>::EigenVectorXtMap map_beta_prev(&m_beta_path_t[nloop][0], n_fea);
@@ -333,13 +334,13 @@ bool CLeastAngleRegression::train_machine_templated(CDenseFeatures<ST> * data)

nloop++;
m_beta_path_t.push_back(beta);
-if (size_t(m_num_active) >= m_beta_idx.size())
+if (size_t(m_num_active) >= get_path_size())
m_beta_idx.push_back(nloop);
else
m_beta_idx[m_num_active] = nloop;

// early stopping with max number of non-zero variables
-if (m_max_nonz > 0 && m_num_active >= m_max_nonz)
+if (get_max_non_zero() > 0 && m_num_active >= get_max_non_zero())
stop_cond = true;
SG_DEBUG("Added : %d , Dropped %d, Active set size %d max_corr %.17f \n", i_max_corr, i_kick, m_num_active, max_corr);
}
@@ -356,8 +357,8 @@ bool CLeastAngleRegression::train_machine_templated(CDenseFeatures<ST> * data)

// assign default estimator
set_w(SGVector<float64_t>(n_fea));
-switch_w(m_beta_idx.size()-1);
+switch_w(get_path_size()-1);
return true;
}
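For context on the max-l1-norm branch touched above: when a step overshoots the cap, the loop backtracks to the exact crossing point by linear interpolation, which is what ST s = (get_max_l1_norm()-l1_prev)/(l1-l1_prev) computes. Below is a self-contained sketch of that interpolation, with plain doubles instead of the templated SGVector; it is exact only while no coefficient changes sign over the step, since the l1-norm is then linear in s.

#include <cassert>
#include <cstddef>
#include <vector>

// Blend the previous estimator and the overshooting one so the result
// sits exactly on the l1-norm cap: s is the fraction of the step at
// which ||beta||_1 crosses max_l1.
std::vector<double> clip_to_l1_cap(const std::vector<double>& beta_prev,
                                   const std::vector<double>& beta,
                                   double l1_prev, double l1, double max_l1)
{
	assert(l1 > max_l1 && max_l1 >= l1_prev && beta.size() == beta_prev.size());
	const double s = (max_l1 - l1_prev) / (l1 - l1_prev);
	std::vector<double> out(beta.size());
	for (std::size_t i = 0; i < beta.size(); ++i)
		out[i] = s * beta[i] + (1.0 - s) * beta_prev[i];
	return out;
}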

4 changes: 2 additions & 2 deletions src/shogun/regression/LinearRidgeRegression.cpp
@@ -29,14 +29,14 @@ CLinearRidgeRegression::CLinearRidgeRegression(float64_t tau, CDenseFeatures<flo
{
init();

-m_tau=tau;
+set_tau(tau);
set_labels(lab);
set_features(data);
}

void CLinearRidgeRegression::init()
{
-m_tau=1e-6;
+set_tau(1e-6);

SG_ADD(&m_tau, "tau", "Regularization parameter", MS_AVAILABLE);
}
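For reference, the tau set here is the usual Tikhonov/ridge parameter: with feature matrix X (one example per column) and targets y, the standard ridge solution is w = (tau*I + X*X')^{-1} X*y, so the default set_tau(1e-6) stays close to ordinary least squares while keeping the linear system well conditioned. The exact convention in shogun's implementation is not shown in this diff.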
16 changes: 8 additions & 8 deletions src/shogun/regression/svr/LibLinearRegression.cpp
@@ -77,13 +77,13 @@ bool CLibLinearRegression::train_machine(CFeatures* data)
}

SGVector<float64_t> w;
-if (m_use_bias)
+if (get_use_bias())
w=SGVector<float64_t>(SG_MALLOC(float64_t, num_feat+1), num_feat);
else
w=SGVector<float64_t>(num_feat);

liblinear_problem prob;
-if (m_use_bias)
+if (get_use_bias())
{
prob.n=w.vlen+1;
memset(w.vector, 0, sizeof(float64_t)*(w.vlen+1));
@@ -103,10 +103,10 @@ bool CLibLinearRegression::train_machine(CFeatures* data)
{
double* Cs = SG_MALLOC(double, prob.l);
for(int i = 0; i < prob.l; i++)
-Cs[i] = m_C;
+Cs[i] = get_C();

-function *fun_obj=new l2r_l2_svr_fun(&prob, Cs, m_tube_epsilon);
-CTron tron_obj(fun_obj, m_epsilon);
+function *fun_obj=new l2r_l2_svr_fun(&prob, Cs, get_tube_epsilon());
+CTron tron_obj(fun_obj, get_epsilon());
tron_obj.tron(w.vector, m_max_train_time);
delete fun_obj;
SG_FREE(Cs);
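A note on the solver branch above: l2r_l2_svr_fun is LibLinear's L2-regularized, L2-loss support vector regression objective, where get_C() weights the per-sample loss and get_tube_epsilon() (p) is the half-width of the insensitivity tube. A minimal sketch of the per-sample loss, assuming the standard LibLinear formulation:

#include <algorithm>
#include <cmath>

// Squared epsilon-insensitive loss for one sample: residuals inside the
// tube of half-width p cost nothing; outside it, the overshoot is squared.
double l2_svr_loss(double prediction, double target, double p)
{
	const double overshoot = std::max(std::fabs(prediction - target) - p, 0.0);
	return overshoot * overshoot;
}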
@@ -160,10 +160,10 @@ bool CLibLinearRegression::train_machine(CFeatures* data)
void CLibLinearRegression::solve_l2r_l1l2_svr(SGVector<float64_t>& w, const liblinear_problem *prob)
{
int l = prob->l;
-double C = m_C;
-double p = m_tube_epsilon;
+double C = get_C();
+double p = get_tube_epsilon();
int w_size = prob->n;
-double eps = m_epsilon;
+double eps = get_epsilon();
int i, s, iter = 0;
int max_iter = 1000;
int active_size = l;