Skip to content

Commit

Permalink
Memory leak fixes for unit tests (#4036)
Browse files Browse the repository at this point in the history
* [ProgressBar] Fix uninitialized variables.

* [Valgrind] Fix memory leak inside create_object method.

* [LDA] Fix memory leak inside LDA.

* [KernelPCA] Fix memory leak inside unit test.

* [FisherLDA] Fix memory leak inside unit test (unit-FLDATest).

* [KRRNystrom] Fix memory leak inside unit test (unit-KRRNystrom).

* [clang-format] Fix style.
  • Loading branch information
geektoni authored and lisitsyn committed Dec 20, 2017
1 parent 041d30e commit 76f3ad6
Show file tree
Hide file tree
Showing 7 changed files with 48 additions and 22 deletions.
1 change: 1 addition & 0 deletions src/shogun/base/class_list.h
Expand Up @@ -43,6 +43,7 @@ namespace shogun {
auto* cast = dynamic_cast<T*>(object);
if (!cast)
{
delete object;
SG_SERROR("Type mismatch");
}
return cast;
Expand Down
6 changes: 4 additions & 2 deletions src/shogun/base/progress.h
Expand Up @@ -84,8 +84,8 @@ namespace shogun
const SGIO& io, float64_t max_value, float64_t min_value,
const std::string& prefix, const SG_PRG_MODE mode)
: m_io(io), m_max_value(max_value), m_min_value(min_value),
m_prefix(prefix), m_mode(mode), m_last_progress(0),
m_last_progress_time(0),
m_prefix(prefix), m_mode(mode), m_columns_num(0), m_rows_num(0),
m_last_progress(0), m_last_progress_time(0),
m_progress_start_time(CTime::get_curtime()),
m_current_value(min_value)
{
Expand Down Expand Up @@ -397,6 +397,8 @@ namespace shogun
m_rows_num = csbi.srWindow.Bottom - csbi.srWindow.Top + 1;
#else
struct winsize wind;
wind.ws_col = 0;
wind.ws_row = 0;
ioctl(STDOUT_FILENO, TIOCGWINSZ, &wind);
m_columns_num = wind.ws_col;
m_rows_num = wind.ws_row;
Expand Down
13 changes: 6 additions & 7 deletions src/shogun/classifier/LDA.cpp
Expand Up @@ -75,27 +75,26 @@ bool CLDA::train_machine(CFeatures *data)
SG_ERROR("Specified features are not of type CDotFeatures\n")
set_features((CDotFeatures*) data);
}
else
else if (!features)
{
data = get_features();
REQUIRE(data, "Features have not been provided.\n")
}

REQUIRE(
data->get_num_vectors() == m_labels->get_num_labels(),
features->get_num_vectors() == m_labels->get_num_labels(),
"Number of training examples(%d) should be equal to number of labels "
"(%d)!\n",
data->get_num_vectors(), m_labels->get_num_labels());
features->get_num_vectors(), m_labels->get_num_labels());

REQUIRE(
features->get_feature_class() == C_DENSE,
"LDA only works with dense features")

if(data->get_feature_type() == F_SHORTREAL)
if (features->get_feature_type() == F_SHORTREAL)
return CLDA::train_machine_templated<float32_t>();
else if(data->get_feature_type() == F_DREAL)
else if (features->get_feature_type() == F_DREAL)
return CLDA::train_machine_templated<float64_t>();
else if(data->get_feature_type() == F_LONGREAL)
else if (features->get_feature_type() == F_LONGREAL)
return CLDA::train_machine_templated<floatmax_t>();

return false;
Expand Down
8 changes: 5 additions & 3 deletions src/shogun/preprocessor/KernelPCA.cpp
Expand Up @@ -149,11 +149,13 @@ SGVector<float64_t> CKernelPCA::apply_to_feature_vector(SGVector<float64_t> vect
{
ASSERT(m_initialized)

std::unique_ptr<CFeatures> features(
new CDenseFeatures<float64_t>(SGMatrix<float64_t>(vector)));
CFeatures* features =
new CDenseFeatures<float64_t>(SGMatrix<float64_t>(vector));
SG_REF(features)

SGMatrix<float64_t> result_matrix = apply_to_feature_matrix(features.get());
SGMatrix<float64_t> result_matrix = apply_to_feature_matrix(features);

SG_UNREF(features)
return SGVector<float64_t>(result_matrix);
}

Expand Down
9 changes: 9 additions & 0 deletions tests/unit/preprocessor/FisherLDA_unittest.cc
Expand Up @@ -119,7 +119,16 @@ class FLDATest: public::testing::Test
labels_vector[i*num+j]=i;
labels=new CMulticlassLabels(labels_vector);

SG_REF(dense_feat);
SG_REF(labels);
}

// Destructor: drops the references the fixture took on its members
// (SG_REF calls visible in the setup code above), so the objects are
// actually freed when the test ends — this is the leak fix for
// unit-FLDATest. NOTE(review): assumes SG_REF was called exactly once
// per member during construction; verify against the full fixture.
~FLDATest()
{
SG_UNREF(dense_feat);
SG_UNREF(labels);
}

CDenseFeatures<float64_t>* dense_feat;
CMulticlassLabels* labels;
};
Expand Down
17 changes: 15 additions & 2 deletions tests/unit/preprocessor/KernelPCA_unittest.cc
Expand Up @@ -41,10 +41,15 @@ TEST(KernelPCA, apply_to_feature_matrix)
CDenseFeatures<float64_t>* test_feats =
new CDenseFeatures<float64_t>(test_matrix);

SG_REF(train_feats)
SG_REF(test_feats)

CGaussianKernel* kernel = new CGaussianKernel();
SG_REF(kernel)
kernel->set_width(1);

CKernelPCA* kpca = new CKernelPCA(kernel);
SG_REF(kpca)
kpca->set_target_dim(target_dim);
kpca->init(train_feats);

Expand All @@ -54,7 +59,10 @@ TEST(KernelPCA, apply_to_feature_matrix)
for (index_t i = 0; i < num_test_vectors * target_dim; ++i)
EXPECT_NEAR(CMath::abs(embedding[i]), CMath::abs(resdata[i]), 1E-6);

SG_FREE(kpca);
SG_UNREF(train_feats)
SG_UNREF(test_feats)
SG_UNREF(kpca);
SG_UNREF(kernel);
}

TEST(KernelPCA, apply_to_feature_vector)
Expand All @@ -65,11 +73,14 @@ TEST(KernelPCA, apply_to_feature_vector)

CDenseFeatures<float64_t>* train_feats =
new CDenseFeatures<float64_t>(train_matrix);
SG_REF(train_feats)

CGaussianKernel* kernel = new CGaussianKernel();
SG_REF(kernel)
kernel->set_width(1);

CKernelPCA* kpca = new CKernelPCA(kernel);
SG_REF(kpca)
kpca->set_target_dim(target_dim);
kpca->init(train_feats);

Expand All @@ -79,5 +90,7 @@ TEST(KernelPCA, apply_to_feature_vector)
for (index_t i = 0; i < target_dim; ++i)
EXPECT_NEAR(CMath::abs(embedding[i]), CMath::abs(resdata[i]), 1E-6);

SG_FREE(kpca);
SG_UNREF(train_feats)
SG_UNREF(kpca);
SG_UNREF(kernel);
}
16 changes: 8 additions & 8 deletions tests/unit/regression/krrnystrom_unittest.cc
Expand Up @@ -36,8 +36,6 @@
#include <shogun/base/some.h>
#include <gtest/gtest.h>

#ifdef HAVE_CXX11

using namespace shogun;

/**
Expand Down Expand Up @@ -93,8 +91,10 @@ TEST(KRRNystrom, apply_and_compare_to_KRR_with_all_columns)
for (index_t i=0; i<num_vectors; ++i)
EXPECT_NEAR(alphas[i], alphas_krr[i], 1E-1);

auto result=nystrom->apply_regression(test_features);
auto result_krr=krr->apply_regression(test_features);
auto result = Some<CRegressionLabels>::from_raw(
nystrom->apply_regression(test_features));
auto result_krr =
Some<CRegressionLabels>::from_raw(krr->apply_regression(test_features));

for (index_t i=0; i<num_vectors; ++i)
EXPECT_NEAR(result->get_label(i), result_krr->get_label(i), 1E-5);
Expand Down Expand Up @@ -149,11 +149,11 @@ TEST(KRRNystrom, apply_and_compare_to_KRR_with_column_subset)
nystrom->train();
krr->train();

auto result=nystrom->apply_regression(test_features);
auto result_krr=krr->apply_regression(test_features);
auto result = Some<CRegressionLabels>::from_raw(
nystrom->apply_regression(test_features));
auto result_krr =
Some<CRegressionLabels>::from_raw(krr->apply_regression(test_features));

for (index_t i=0; i<num_vectors; ++i)
EXPECT_NEAR(result->get_label(i), result_krr->get_label(i), 1E-1);
}

#endif /* HAVE_CXX11 */

0 comments on commit 76f3ad6

Please sign in to comment.