Skip to content

Commit

Permalink
Merge pull request #94 from schalkdaniel/general_updates
Browse files Browse the repository at this point in the history
update some destructors
  • Loading branch information
Daniel Schalk committed Jan 18, 2018
2 parents 8c79dd1 + 08a7f30 commit c791dbd
Show file tree
Hide file tree
Showing 10 changed files with 72 additions and 56 deletions.
1 change: 1 addition & 0 deletions src/baselearner_list.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,7 @@ blearner_factory_map BaselearnerList::GetMap ()
// Remove all registered factorys:
void BaselearnerList::ClearMap ()
{
std::cout << "Delete BaselearnerList!" << std::cout;
// This deletes all the data which are sometimes necessary to re register
// factorys!
// for (blearner_factory_map::iterator it = my_factory_map.begin(); it != my_factory_map.end(); ++it) {
Expand Down
2 changes: 2 additions & 0 deletions src/baselearner_list.h
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,8 @@ class BaselearnerList

// Get the data used for modelling:
std::pair<std::vector<std::string>, arma::mat> GetModelFrame ();

// ~BaselearnerList () {std::cout << "Destroy BaselearnerList!" << std::endl; }
};

} // namespace blearnerlist
Expand Down
13 changes: 4 additions & 9 deletions src/compboost.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -37,11 +37,6 @@
//
// ========================================================================== //



// THIS ONE IS UNDER PROGRESS!


#include "compboost.h"

namespace cboost {
Expand Down Expand Up @@ -193,10 +188,10 @@ std::vector<std::string> Compboost::GetSelectedBaselearner ()
// Destructor:
Compboost::~Compboost ()
{
std::cout << "Call Compboost Destructor" << std::endl;
delete used_optimizer;
delete used_loss;
delete used_logger;
// std::cout << "Call Compboost Destructor" << std::endl;
// delete used_optimizer;
// delete used_loss;
// delete used_logger;
}

} // namespace cboost
21 changes: 14 additions & 7 deletions src/compboost_modules.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -211,9 +211,9 @@ class BaselearnerListWrapper
obj.ClearMap();
}

blearnerlist::BaselearnerList getFactoryList ()
blearnerlist::BaselearnerList* getFactoryList ()
{
return obj;
return &obj;
}

Rcpp::List getModelFrame ()
Expand Down Expand Up @@ -477,15 +477,20 @@ class GreedyOptimizer : public OptimizerWrapper
{
public:
GreedyOptimizer () { obj = new optimizer::Greedy(); }

Rcpp::List testOptimizer (arma::vec& response, BaselearnerListWrapper factory_list)
{
std::string temp_str = "test run";
blearner::Baselearner* blearner_test = obj->FindBestBaselearner(temp_str, response, factory_list.getFactoryList().GetMap());
blearner::Baselearner* blearner_test = obj->FindBestBaselearner(temp_str, response, factory_list.getFactoryList()->GetMap());

return Rcpp::List::create(
Rcpp::List out = Rcpp::List::create(
Rcpp::Named("selected.learner") = blearner_test->GetIdentifier(),
Rcpp::Named("parameter") = blearner_test->GetParameter()
);

delete blearner_test;

return out;
}
};

Expand Down Expand Up @@ -522,6 +527,7 @@ class CompboostWrapper
learning_rate0 = learning_rate;
used_logger = logger_list.getLoggerList();
used_optimizer = optimizer.getOptimizer();
blearner_list_ptr = factory_list.getFactoryList();

// used_optimizer = new optimizer::Greedy();
// std::cout << "<<CompboostWrapper>> Create new Optimizer" << std::endl;
Expand All @@ -538,7 +544,7 @@ class CompboostWrapper
// // std::cout << "<<CompboostWrapper>> Register Logger" << std::endl;

obj = new cboost::Compboost(response, learning_rate0, stop_if_all_stopper_fulfilled,
used_optimizer, loss.getLoss(), used_logger, factory_list.getFactoryList());
used_optimizer, loss.getLoss(), used_logger, *blearner_list_ptr);
// std::cout << "<<CompboostWrapper>> Create Compboost" << std::endl;
}

Expand Down Expand Up @@ -578,10 +584,11 @@ class CompboostWrapper

private:

blearnerlist::BaselearnerList* blearner_list_ptr;
loggerlist::LoggerList* used_logger;
optimizer::Optimizer* used_optimizer = NULL;
optimizer::Optimizer* used_optimizer;
cboost::Compboost* obj;
arma::mat* eval_data = NULL;
arma::mat* eval_data;

unsigned int max_iterations;
double learning_rate0;
Expand Down
5 changes: 3 additions & 2 deletions src/optimizer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -86,14 +86,15 @@ blearner::Baselearner* Greedy::FindBestBaselearner (std::string& iteration_id,
// baselearner:
if (ssq_best == 0) {
ssq_best = ssq_temp;
blearner_best = blearner_temp;
blearner_best = blearner_temp->Clone();
}

if (ssq_temp < ssq_best) {
ssq_best = ssq_temp;
blearner_best = blearner_temp;
blearner_best = blearner_temp->Clone();
}

delete blearner_temp;
blearner_temp = NULL;

// if (k > 0) {
Expand Down
2 changes: 2 additions & 0 deletions tests/testthat/test_factory.R
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ test_that("polynomial factory works", {
X.linear = 1:10
X.cubic = X.linear^3

set.seed(pi)
X.test = as.matrix(runif(200))

y = 3 * X.linear + rnorm(10, 0, 2)
Expand Down Expand Up @@ -77,6 +78,7 @@ test_that("custom factory works", {
}

# Data X and response y:
set.seed(pi)
X = matrix(1:10, ncol = 1)
y = sin(as.numeric(X)) + rnorm(10, 0, 0.6)

Expand Down
2 changes: 2 additions & 0 deletions tests/testthat/test_optimizer.R
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,9 @@ context("The optimizer works")

test_that("greedy optimizer works", {

set.seed(pi)
X = as.matrix(runif(100, -4, 4))

y.linear = as.numeric(32 * X)
y.cubic = as.numeric(16 * X^3)
y.pow5 = as.numeric(8 * X^5)
Expand Down
20 changes: 20 additions & 0 deletions tests/testthat/test_printer.R
Original file line number Diff line number Diff line change
@@ -1,5 +1,25 @@
context("Printer works")

test_that("factory list printer works", {

factory.list = FactoryList$new()

# A hack to suppress console output:
tc = textConnection(NULL, "w")
sink(tc)

test.factory.list.printer = show(factory.list)

sink()
close(tc)

# Test:
# ---------

expect_equal(test.factory.list.printer, "FactoryListPrinter")

})

test_that("Loss printer works", {

quadratic.loss = QuadraticLoss$new()
Expand Down
4 changes: 2 additions & 2 deletions tutorials/compboost_vs_mboost.R
Original file line number Diff line number Diff line change
Expand Up @@ -107,12 +107,12 @@ cboost.xselect = match(
)
)

all.equal(predict(mod), cboost$getPrediction())
all.equal(mod$xselect(), cboost.xselect)

# Check if the prediction is the same:
# ------------------------------------

all.equal(mod$xselect(), cboost.xselect)
all.equal(predict(mod), cboost$getPrediction())
# cboost$GetParameter()

# Benchmark:
Expand Down
58 changes: 22 additions & 36 deletions tutorials/run_optimizer.R
Original file line number Diff line number Diff line change
@@ -1,43 +1,29 @@
# Define a 'BaselearnerList':
# ===================================
set.seed(pi)
X = as.matrix(runif(100, -4, 4))

X = matrix(1:10, ncol = 1)
y = 3 * as.numeric(X^3) + rnorm(10, 0, 2)
y.linear = as.numeric(32 * X)
y.cubic = as.numeric(16 * X^3)
y.pow5 = as.numeric(8 * X^5)

# Create some stupid baselearner (obviously the linear one is the best):
bl.linear = BaselearnerWrapper$new("l1", X, "x", 1)
bl.quadratic = BaselearnerWrapper$new("q1", X, "x", 2)
bl.cubic = BaselearnerWrapper$new("c1", X, "x", 3)
bl.linear2 = BaselearnerWrapper$new("l0", 2 * X, "x", 1)
# Create new linear baselearner of hp and wt:
linear.factory = PolynomialFactory$new(X, "X", 1)
cubic.factory = PolynomialFactory$new(X, "X", 3)
pow5.factory = PolynomialFactory$new(X, "X", 5)

# Register the learner:
bl.quadratic$RegisterFactory("x")
bl.cubic$RegisterFactory("x")
bl.linear2$RegisterFactory("2*x")
bl.linear$RegisterFactory("x")
# Create new factory list:
factory.list = FactoryList$new()

# Train the linear one to compare with the result of the optimizer:
bl.linear$train(y)
# Register factorys:
factory.list$registerFactory(linear.factory)
factory.list$registerFactory(cubic.factory)
factory.list$registerFactory(pow5.factory)

# Print all registered baselearner:
printRegisteredFactorys()
# Optimizer:
greedy.optimizer = GreedyOptimizer$new()

# Get best Baselearner:
# ===================================
res.linear = greedy.optimizer$testOptimizer(y.linear, factory.list)
res.cubic = greedy.optimizer$testOptimizer(y.cubic, factory.list)
res.pow5 = greedy.optimizer$testOptimizer(y.pow5, factory.list)

# Use the greedy algorithm:
getBestBaselearner(y)

bl.linear$GetParameter()

# What happens here? Why don't we get the linear parameter?
# Register that we have also used 2*x as linear baselearner which basically
# is exactly the same as x. The parameter here is just divided by 2:
bl.linear2$train(y)
bl.linear2$GetParameter()

# Clear the registry:
clearRegisteredFactorys()

# Take a look if all registrys were deleted:
printRegisteredFactorys()
res.cubic
res.linear

0 comments on commit c791dbd

Please sign in to comment.