Skip to content

Commit

Permalink
Minor refactorings and changes in:
Browse files Browse the repository at this point in the history
    * all the StructuredOutputModels
    * all the StructuredOutputMachines
to remove redundant methods and initializations
  • Loading branch information
pandaabinash committed Jun 18, 2014
1 parent 225a0e3 commit 36a98f4
Show file tree
Hide file tree
Showing 14 changed files with 26 additions and 181 deletions.
13 changes: 6 additions & 7 deletions src/shogun/structure/CCSOSVM.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -534,19 +534,18 @@ SGSparseVector<float64_t> CCCSOSVM::find_cutting_plane(float64_t* margin)
for (index_t i = 0; i < num_samples; i++)
{
CResultSet* result = m_model->argmax(m_w, i);
if (!result->psi_computed_sparse)
if (result->psi_computed)
{
new_constraint.add(result->psi_truth);
result->psi_pred.scale(-1.0);
new_constraint.add(result->psi_pred);
}
else
else if(result->psi_computed_sparse)
{
new_constraint.add(result->psi_truth_sparse.get_dense(psi_size));
SGVector<float64_t> psi_pred_dense =
result->psi_pred_sparse.get_dense(psi_size);
psi_pred_dense.scale(-1.0);
new_constraint.add(psi_pred_dense);
result->psi_truth_sparse.add_to_dense(1.0, new_constraint.vector,
new_constraint.vlen);
result->psi_pred_sparse.add_to_dense(-1.0, new_constraint.vector,
new_constraint.vlen);
}
/*
printf("%.16lf %.16lf\n",
Expand Down
8 changes: 0 additions & 8 deletions src/shogun/structure/DirectorStructuredModel.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -41,14 +41,6 @@ SGVector< float64_t > CDirectorStructuredModel::get_joint_feature_vector(
return SGVector<float64_t>();
}

/** Director-class stub for the sparse joint feature vector \f$\Psi(x,y)\f$.
 * Director models are meant to be subclassed from a target language (e.g.
 * Python via SWIG); this base version only raises SG_ERROR and returns an
 * empty sparse vector.
 *
 * @param feat_idx index of the feature vector to use
 * @param y structured label to use
 * @return empty SGSparseVector — meaningful only when overridden
 */
SGSparseVector< float64_t > CDirectorStructuredModel::get_sparse_joint_feature_vector(
int32_t feat_idx,
CStructuredData* y)
{
// NOTE: "implemement" typo is part of the emitted error string; left as-is.
SG_ERROR("Please implemement get_sparse_joint_feature_vector(feat_idx,y) in your target language before use\n")
return SGSparseVector<float64_t>();
}

float64_t CDirectorStructuredModel::delta_loss(CStructuredData* y1, CStructuredData* y2)
{
SG_ERROR("Please implemement delta_loss(y1,y2) in your target language before use\n")
Expand Down
15 changes: 0 additions & 15 deletions src/shogun/structure/DirectorStructuredModel.h
Original file line number Diff line number Diff line change
Expand Up @@ -55,21 +55,6 @@ IGNORE_IN_CLASSLIST class CDirectorStructuredModel : public CStructuredModel
*/
virtual SGVector< float64_t > get_joint_feature_vector(int32_t feat_idx, CStructuredData* y);

/**
* get joint feature vector
*
* \f[
* \vec{\Psi}(\bf{x}_\text{feat\_idx}, \bf{y})
* \f]
*
* @param feat_idx index of the feature vector to use
* @param y structured label to use
*
* @return the joint feature vector
*/
virtual SGSparseVector< float64_t > get_joint_feature_vector(int32_t feat_idx,
CStructuredData* y);

/**
* obtains the argmax of \f$ \Delta(y_{pred}, y_{truth}) +
* \langle w, \Psi(x_{truth}, y_{pred}) \rangle \f$
Expand Down
13 changes: 0 additions & 13 deletions src/shogun/structure/FactorGraphModel.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -278,16 +278,6 @@ SGVector< float64_t > CFactorGraphModel::get_joint_feature_vector(int32_t feat_i
return psi;
}

/** Sparse joint feature vector — not supported by the factor-graph model
 * (it computes the dense \f$\Psi\f$ instead; see get_joint_feature_vector).
 * Always raises SG_ERROR naming the concrete class.
 *
 * @param feat_idx index of the feature vector to use (unused)
 * @param y structured label to use (unused)
 * @return empty SGSparseVector (unreachable in practice after SG_ERROR)
 */
SGSparseVector< float64_t > CFactorGraphModel::get_sparse_joint_feature_vector(
int32_t feat_idx,
CStructuredData* y)
{
SG_ERROR("compute_sparse_joint_feature(int32_t, CStructuredData*) is not "
"implemented for %s!\n", get_name());

return SGSparseVector< float64_t >();
}

// E(x_i, y; w) - E(x_i, y_i; w) >= L(y_i, y) - xi_i
// xi_i >= max oracle
// max oracle := argmax_y { L(y_i, y) - E(x_i, y; w) + E(x_i, y_i; w) }
Expand Down Expand Up @@ -323,9 +313,6 @@ CResultSet* CFactorGraphModel::argmax(SGVector<float64_t> w, int32_t feat_idx, b
// prepare CResultSet
CResultSet* ret = new CResultSet();
SG_REF(ret);
ret->psi_computed_sparse = false;
ret->psi_pred_sparse = SGSparseVector<float64_t>(0);
ret->psi_truth_sparse = SGSparseVector<float64_t>(0);

// y_truth
CFactorGraphObservation* y_truth =
Expand Down
15 changes: 0 additions & 15 deletions src/shogun/structure/FactorGraphModel.h
Original file line number Diff line number Diff line change
Expand Up @@ -107,21 +107,6 @@ class CFactorGraphModel : public CStructuredModel
*/
virtual SGVector< float64_t > get_joint_feature_vector(int32_t feat_idx, CStructuredData* y);

/**
* get joint feature vector
*
* \f[
* \vec{\Psi}(\bf{x}_\text{feat\_idx}, \bf{y})
* \f]
*
* @param feat_idx index of the feature vector to use
* @param y structured label to use
*
* @return the joint feature vector
*/
virtual SGSparseVector< float64_t > get_sparse_joint_feature_vector(int32_t feat_idx,
CStructuredData* y);

/**
* obtains the argmax of \f$ \Delta(y_{pred}, y_{truth}) +
* \langle w, \Psi(x_{truth}, y_{pred}) \rangle \f$
Expand Down
14 changes: 0 additions & 14 deletions src/shogun/structure/HMSVMModel.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -155,16 +155,6 @@ SGVector< float64_t > CHMSVMModel::get_joint_feature_vector(
return psi;
}

/** Sparse joint feature vector — not supported by the HM-SVM model
 * (only the dense \f$\Psi\f$ is available). Always raises SG_ERROR
 * naming the concrete class.
 *
 * @param feat_idx index of the feature vector to use (unused)
 * @param y structured label to use (unused)
 * @return empty SGSparseVector (unreachable in practice after SG_ERROR)
 */
SGSparseVector< float64_t > CHMSVMModel::get_sparse_joint_feature_vector(
int32_t feat_idx,
CStructuredData* y)
{
SG_ERROR("compute_sparse_joint_feature(int32_t, CStructuredData*) is not "
"implemented for %s!\n", get_name());

return SGSparseVector< float64_t >();
}

CResultSet* CHMSVMModel::argmax(
SGVector< float64_t > w,
int32_t feat_idx,
Expand Down Expand Up @@ -316,10 +306,6 @@ CResultSet* CHMSVMModel::argmax(
SGVector< int32_t > opt_path(T);
CResultSet* ret = new CResultSet();
SG_REF(ret);
ret->psi_computed_sparse = false;
ret->psi_pred_sparse = SGSparseVector<float64_t>(0);
ret->psi_truth_sparse = SGSparseVector<float64_t>(0);

ret->score = -CMath::INFTY;
opt_path[T-1] = -1;

Expand Down
15 changes: 0 additions & 15 deletions src/shogun/structure/HMSVMModel.h
Original file line number Diff line number Diff line change
Expand Up @@ -67,21 +67,6 @@ class CHMSVMModel : public CStructuredModel
*/
virtual SGVector< float64_t > get_joint_feature_vector(int32_t feat_idx, CStructuredData* y);

/**
* get joint feature vector
*
* \f[
* \vec{\Psi}(\bf{x}_\text{feat\_idx}, \bf{y})
* \f]
*
* @param feat_idx index of the feature vector to use
* @param y structured label to use
*
* @return the joint feature vector
*/
virtual SGSparseVector< float64_t > get_sparse_joint_feature_vector(int32_t feat_idx,
CStructuredData* y);

/**
* obtains the argmax of \f$ \Delta(y_{pred}, y_{truth}) +
* \langle w, \Psi(x_{truth}, y_{pred}) \rangle \f$
Expand Down
13 changes: 0 additions & 13 deletions src/shogun/structure/MulticlassModel.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -63,16 +63,6 @@ SGVector< float64_t > CMulticlassModel::get_joint_feature_vector(int32_t feat_id
return psi;
}

/** Sparse joint feature vector — not supported by the multiclass model
 * (only the dense \f$\Psi\f$ is available). Always raises SG_ERROR
 * naming the concrete class.
 *
 * @param feat_idx index of the feature vector to use (unused)
 * @param y structured label to use (unused)
 * @return empty SGSparseVector (unreachable in practice after SG_ERROR)
 */
SGSparseVector< float64_t > CMulticlassModel::get_sparse_joint_feature_vector(
int32_t feat_idx,
CStructuredData* y)
{
SG_ERROR("compute_sparse_joint_feature(int32_t, CStructuredData*) is not "
"implemented for %s!\n", get_name());

return SGSparseVector< float64_t >();
}

CResultSet* CMulticlassModel::argmax(
SGVector< float64_t > w,
int32_t feat_idx,
Expand Down Expand Up @@ -116,9 +106,6 @@ CResultSet* CMulticlassModel::argmax(
// Build the CResultSet object to return
CResultSet* ret = new CResultSet();
SG_REF(ret);
ret->psi_computed_sparse = false;
ret->psi_pred_sparse = SGSparseVector<float64_t>(0);
ret->psi_truth_sparse = SGSparseVector<float64_t>(0);

CRealNumber* y = new CRealNumber(ypred);
SG_REF(y);
Expand Down
15 changes: 0 additions & 15 deletions src/shogun/structure/MulticlassModel.h
Original file line number Diff line number Diff line change
Expand Up @@ -64,21 +64,6 @@ class CMulticlassModel : public CStructuredModel
*/
virtual SGVector< float64_t > get_joint_feature_vector(int32_t feat_idx, CStructuredData* y);

/**
* get joint feature vector
*
* \f[
* \vec{\Psi}(\bf{x}_\text{feat\_idx}, \bf{y})
* \f]
*
* @param feat_idx index of the feature vector to use
* @param y structured label to use
*
* @return the joint feature vector
*/
virtual SGSparseVector< float64_t > get_sparse_joint_feature_vector(int32_t feat_idx,
CStructuredData* y);

/**
* obtains the argmax of \f$ \Delta(y_{pred}, y_{truth}) +
* \langle w, \Psi(x_{truth}, y_{pred}) \rangle \f$
Expand Down
13 changes: 0 additions & 13 deletions src/shogun/structure/MultilabelModel.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -83,16 +83,6 @@ SGVector<float64_t> CMultilabelModel::get_joint_feature_vector(int32_t feat_idx,
return psi;
}

/** Sparse joint feature vector — not supported by the multilabel model
 * (only the dense \f$\Psi\f$ is available). Always raises SG_ERROR
 * naming the concrete class.
 *
 * @param feat_idx index of the feature vector to use (unused)
 * @param y structured label to use (unused)
 * @return empty SGSparseVector (unreachable in practice after SG_ERROR)
 */
SGSparseVector< float64_t > CMultilabelModel::get_sparse_joint_feature_vector(
int32_t feat_idx,
CStructuredData* y)
{
SG_ERROR("compute_sparse_joint_feature(int32_t, CStructuredData*) is not "
"implemented for %s!\n", get_name());

return SGSparseVector< float64_t >();
}

float64_t CMultilabelModel::delta_loss(CStructuredData * y1, CStructuredData * y2)
{
CSparseMultilabel * y1_slabel = CSparseMultilabel::obtain_from_generic(y1);
Expand Down Expand Up @@ -211,9 +201,6 @@ CResultSet * CMultilabelModel::argmax(SGVector<float64_t> w, int32_t feat_idx,

CResultSet * ret = new CResultSet();
SG_REF(ret);
ret->psi_computed_sparse = false;
ret->psi_pred_sparse = SGSparseVector<float64_t>(0);
ret->psi_truth_sparse = SGSparseVector<float64_t>(0);

CSparseMultilabel * y_pred = new CSparseMultilabel(y_pred_sparse);
SG_REF(y_pred);
Expand Down
12 changes: 0 additions & 12 deletions src/shogun/structure/MultilabelModel.h
Original file line number Diff line number Diff line change
Expand Up @@ -55,18 +55,6 @@ class CMultilabelModel : public CStructuredModel
virtual SGVector<float64_t> get_joint_feature_vector(int32_t feat_idx,
CStructuredData * y);

/** get joint feature vector
*
* \f[
* \vec{\Psi}(\bf{x}_\text{feat\_idx}, \bf{y})
* \f]
*
* @param feat_idx index of the feature vector to use
* @param y structured label to use
*/
virtual SGSparseVector<float64_t> get_sparse_joint_feature_vector(int32_t feat_idx,
CStructuredData * y);

/** obtain the argmax of \f$ \Delta(y_{pred}, y_{truth}) + \langle w,
* \Psi(x_{truth}, y_{pred}) \rangle \f$
*
Expand Down
28 changes: 8 additions & 20 deletions src/shogun/structure/PrimalMosekSOSVM.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -215,16 +215,16 @@ float64_t CPrimalMosekSOSVM::compute_loss_arg(CResultSet* result) const
// Dimensionality of the joint feature space
int32_t M = m_w.vlen;

if(!result->psi_computed_sparse)
if(result->psi_computed)
{
return SGVector< float64_t >::dot(m_w.vector, result->psi_pred.vector, M) +
result->delta -
SGVector< float64_t >::dot(m_w.vector, result->psi_truth.vector, M);
}
else
else if(result->psi_computed_sparse)
{
return result->psi_pred_sparse.dense_dot(1.0, m_w.vector, m_w.vlen, 0) -
result->delta +
return result->psi_pred_sparse.dense_dot(1.0, m_w.vector, m_w.vlen, 0) +
result->delta -
result->psi_truth_sparse.dense_dot(1.0, m_w.vector, m_w.vlen, 0);
}
}
Expand All @@ -251,28 +251,16 @@ bool CPrimalMosekSOSVM::add_constraint(
int32_t M = m_model->get_dim();
SGVector< float64_t > dPsi(M);

if (!result->psi_computed_sparse)
if (result->psi_computed)
{
for ( int i = 0 ; i < M ; ++i )
dPsi[i] = result->psi_pred[i] - result->psi_truth[i]; // -dPsi(y)
}
else
else if(result->psi_computed_sparse)
{
dPsi.zero();

SGSparseVector<float64_t> psi_pred_sparse = result->psi_pred_sparse;
for (int32_t i = 0; i < psi_pred_sparse.num_feat_entries; i++)
{
dPsi[psi_pred_sparse.features[i].feat_index] +=
psi_pred_sparse.features[i].entry;
}

SGSparseVector<float64_t> psi_truth_sparse = result->psi_truth_sparse;
for (int32_t i = 0; i < psi_truth_sparse.num_feat_entries; i++)
{
dPsi[psi_truth_sparse.features[i].feat_index] -=
psi_truth_sparse.features[i].entry;
}
result->psi_pred_sparse.add_to_dense(1.0, dPsi.vector, dPsi.vlen);
result->psi_truth_sparse.add_to_dense(-1.0, dPsi.vector, dPsi.vlen);
}

return ( mosek->add_constraint_sosvm(dPsi, con_idx, train_idx,
Expand Down
20 changes: 4 additions & 16 deletions src/shogun/structure/StochasticSOSVM.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -127,29 +127,17 @@ bool CStochasticSOSVM::train_machine(CFeatures* data)
SGVector<float64_t> psi_i(M);
SGVector<float64_t> w_s(M);

if (!result->psi_computed_sparse)
if (result->psi_computed)
{
SGVector<float64_t>::add(psi_i.vector,
1.0, result->psi_truth.vector, -1.0, result->psi_pred.vector,
psi_i.vlen);
}
else
else if(result->psi_computed_sparse)
{
psi_i.zero();

SGSparseVector<float64_t> psi_pred_sparse = result->psi_pred_sparse;
for (int32_t i = 0; i < psi_pred_sparse.num_feat_entries; i++)
{
psi_i[psi_pred_sparse.features[i].feat_index] +=
psi_pred_sparse.features[i].entry;
}

SGSparseVector<float64_t> psi_truth_sparse = result->psi_truth_sparse;
for (int32_t i = 0; i < psi_truth_sparse.num_feat_entries; i++)
{
psi_i[psi_truth_sparse.features[i].feat_index] -=
psi_truth_sparse.features[i].entry;
}
result->psi_pred_sparse.add_to_dense(1.0, psi_i.vector, psi_i.vlen);
result->psi_truth_sparse.add_to_dense(-1.0, psi_i.vector, psi_i.vlen);
}

w_s = psi_i.clone();
Expand Down
13 changes: 8 additions & 5 deletions src/shogun/structure/StructuredModel.h
Original file line number Diff line number Diff line change
Expand Up @@ -64,19 +64,22 @@ struct CResultSet : public CSGObject
CStructuredData* argmax;

/** whether joint feature vector is sparse or not */
bool psi_computed_sparse;
bool psi_computed_sparse = false;

/** whether joint feature vector is dense or not */
bool psi_computed = true;

/** joint feature vector for the given truth */
SGVector< float64_t > psi_truth;
SGVector< float64_t > psi_truth = SGVector<float64_t>(0);

/** joint feature vector for the prediction */
SGVector< float64_t > psi_pred;
SGVector< float64_t > psi_pred = SGVector<float64_t>(0);

/** joint feature vector for the given truth */
SGSparseVector< float64_t > psi_truth_sparse;
SGSparseVector< float64_t > psi_truth_sparse = SGSparseVector<float64_t>(0);

/** joint feature vector for the prediction */
SGSparseVector< float64_t > psi_pred_sparse;
SGSparseVector< float64_t > psi_pred_sparse = SGSparseVector<float64_t>(0);

/** \f$ \Delta(y_{pred}, y_{truth}) + \langle w,
* \Psi(x_{truth}, y_{pred}) - \Psi(x_{truth}, y_{truth}) \rangle \f$ */
Expand Down

0 comments on commit 36a98f4

Please sign in to comment.