Skip to content

Commit

Permalink
Merge pull request #217 from schalkdaniel/add_arg_handlers
Browse files Browse the repository at this point in the history
add handler
  • Loading branch information
Daniel Schalk committed Jun 28, 2018
2 parents eb5ae31 + 62fd6c7 commit 8940f9e
Show file tree
Hide file tree
Showing 5 changed files with 179 additions and 41 deletions.
43 changes: 43 additions & 0 deletions R/blearner_handler.R
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
# Argument handler for the polynomial base-learner.
#
# Fills in defaults for unspecified hyperparameters and warns about any
# unknown arguments instead of silently dropping them.
#
# @param degree [integer(1)] Degree of the polynomial (default: 1).
# @param intercept [logical(1)] Add an intercept column? (default: TRUE).
# @param ... Additional arguments; these are not used by this base-learner
#   and only trigger a warning (typically a misspelled argument name).
# @return [list] Named list of the parameters forwarded to the factory.
.handleRcpp_PolynomialBlearner = function (degree = 1, intercept = TRUE, ...) {

  nuisance = list(...)
  if (length(nuisance) > 0) {
    # Warn rather than error so a misspelled argument is surfaced without
    # aborting model construction. call. = FALSE hides the internal handler
    # call (the handler is invoked via do.call, so the call is uninformative).
    warning("Following arguments are ignored by the polynomial base-learner: ",
      paste(names(nuisance), collapse = ", "), call. = FALSE)
  }
  params = list(degree = degree, intercept = intercept)

  return (params)
}

# Argument handler for the P-spline base-learner.
#
# Fills in defaults for unspecified hyperparameters and warns about any
# unknown arguments instead of silently dropping them.
#
# @param degree [integer(1)] Degree of the spline basis (default: 3).
# @param n.knots [integer(1)] Number of inner knots (default: 20).
# @param penalty [numeric(1)] Penalty term (default: 2).
#   NOTE(review): presumably the smoothing penalty passed to the C++ factory;
#   confirm against the factory documentation.
# @param differences [integer(1)] Order of the difference penalty (default: 2).
# @param ... Additional arguments; these are not used by this base-learner
#   and only trigger a warning (typically a misspelled argument name).
# @return [list] Named list of the parameters forwarded to the factory.
.handleRcpp_PSplineBlearner = function (degree = 3, n.knots = 20, penalty = 2, differences = 2, ...) {

  nuisance = list(...)
  if (length(nuisance) > 0) {
    # Warn rather than error so a misspelled argument is surfaced without
    # aborting model construction. call. = FALSE hides the internal handler
    # call (the handler is invoked via do.call, so the call is uninformative).
    warning("Following arguments are ignored by the spline base-learner: ",
      paste(names(nuisance), collapse = ", "), call. = FALSE)
  }
  params = list(degree = degree, n.knots = n.knots, penalty = penalty, differences = differences)

  return (params)
}

# Argument handler for the custom (R function) base-learner.
#
# Collects the four user-supplied R functions that define the base-learner
# and warns about any unknown arguments instead of silently dropping them.
# All four functions are required; there are no defaults.
#
# @param instantiate.fun [function] Builds the design matrix from the data.
# @param train.fun [function] Fits the base-learner.
# @param predict.fun [function] Predicts from a fitted model.
# @param param.fun [function] Extracts the parameter from a fitted model.
# @param ... Additional arguments; these are not used by this base-learner
#   and only trigger a warning (typically a misspelled argument name).
# @return [list] Named list of the functions forwarded to the factory.
.handleRcpp_CustomBlearner = function (instantiate.fun, train.fun, predict.fun, param.fun, ...) {

  nuisance = list(...)
  if (length(nuisance) > 0) {
    # Warn rather than error so a misspelled argument is surfaced without
    # aborting model construction. call. = FALSE hides the internal handler
    # call (the handler is invoked via do.call, so the call is uninformative).
    warning("Following arguments are ignored by the custom base-learner: ",
      paste(names(nuisance), collapse = ", "), call. = FALSE)
  }
  params = list(instantiate.fun = instantiate.fun, train.fun = train.fun, predict.fun = predict.fun, param.fun = param.fun)

  return (params)
}

# Argument handler for the custom C++ base-learner.
#
# Collects the three pointers to compiled C++ functions that define the
# base-learner and warns about any unknown arguments instead of silently
# dropping them. All three pointers are required; there are no defaults.
#
# @param instantiate.ptr External pointer to the data-instantiation function.
# @param train.ptr External pointer to the training function.
# @param predict.ptr External pointer to the prediction function.
#   NOTE(review): presumably produced by setter functions compiled via
#   Rcpp::sourceCpp (see getCustomCppExample()); confirm expected type.
# @param ... Additional arguments; these are not used by this base-learner
#   and only trigger a warning (typically a misspelled argument name).
# @return [list] Named list of the pointers forwarded to the factory.
.handleRcpp_CustomCppBlearner = function (instantiate.ptr, train.ptr, predict.ptr, ...) {

  nuisance = list(...)
  if (length(nuisance) > 0) {
    # Warn rather than error so a misspelled argument is surfaced without
    # aborting model construction. call. = FALSE hides the internal handler
    # call (the handler is invoked via do.call, so the call is uninformative).
    warning("Following arguments are ignored by the custom cpp base-learner: ",
      paste(names(nuisance), collapse = ", "), call. = FALSE)
  }
  params = list(instantiate.ptr = instantiate.ptr, train.ptr = train.ptr, predict.ptr = predict.ptr)

  return (params)
}
68 changes: 37 additions & 31 deletions R/compboost.R
Original file line number Diff line number Diff line change
Expand Up @@ -568,7 +568,13 @@ private = list(
private$bl.list[[id]]$source = data.source$new(as.matrix(data.columns), paste(feature, collapse = "_"))
private$bl.list[[id]]$feature = feature
private$bl.list[[id]]$target = data.target$new()
private$bl.list[[id]]$factory = bl.factory$new(private$bl.list[[id]]$source, private$bl.list[[id]]$target, id.fac, ...)

# Call handler for default arguments and argument handling:
handler.name = paste0(".handle", bl.factory@.Data)
par.set = c(source = private$bl.list[[id]]$source, target = private$bl.list[[id]]$target, id = id.fac, do.call(handler.name, list(...)))
private$bl.list[[id]]$factory = do.call(bl.factory$new, par.set)
# private$bl.list[[id]]$factory = bl.factory$new(private$bl.list[[id]]$source, private$bl.list[[id]]$target, id.fac, ...)

self$bl.factory.list$registerFactory(private$bl.list[[id]]$factory)
private$bl.list[[id]]$source = NULL

Expand Down Expand Up @@ -599,33 +605,33 @@ private = list(
)
)

if (FALSE) {
load_all()
cars$dist_cat = ifelse(cars$speed > 15, "A", "B")
cars$foo_1 = rnorm(50)
cb = Compboost$new(cars, "speed", loss = QuadraticLoss$new(10))
cb
cb$risk()
cb$selected()
cb$addBaselearner("dist_cat", "linear", PolynomialBlearnerFactory, degree = 1, intercept = TRUE)
lapply(c("dist", "foo_1"), function(x) cb$addBaselearner(x, "linear", PolynomialBlearnerFactory, degree = 1))
cb$train(5)
cb$risk()
cb$selected()
# cb$addBaselearner(c("dist", "foo"), "quadratic", PolynomialBlearnerFactory, degree = 2, intercept = TRUE)
# cb$addBaselearner("dist", "spline", PSplineBlearnerFactory, degree = 3, knots = 10, penalty = 2, differences = 2)
#cb$addBaselearner("dist_cat", "linear", PolynomialBlearnerFactory, degree = 1)
cb$addLogger(IterationLogger, use.as.stopper = TRUE, logger.id = "bla", iter.max = 500)
cb$train(NULL)
cb
cb$predict()
cb$predict(cars)
cb$train(10)
cb2$train(200)
gc()
cb$train(100000)
cb$bl.factory.list
cb$model$getLoggerData()
head(cb$model$getParameterMatrix()[[2]])
cb$model$getParameterMatrix()[[1]]
}
# if (FALSE) {
# load_all()
# cars$dist_cat = ifelse(cars$speed > 15, "A", "B")
# cars$foo_1 = rnorm(50)
# cb = Compboost$new(cars, "speed", loss = QuadraticLoss$new(10))
# cb
# cb$risk()
# cb$selected()
# cb$addBaselearner("dist_cat", "linear", PolynomialBlearnerFactory, degree = 1, intercept = TRUE)
# lapply(c("dist", "foo_1"), function(x) cb$addBaselearner(x, "linear", PolynomialBlearnerFactory, degree = 1))
# cb$train(5)
# cb$risk()
# cb$selected()
# # cb$addBaselearner(c("dist", "foo"), "quadratic", PolynomialBlearnerFactory, degree = 2, intercept = TRUE)
# # cb$addBaselearner("dist", "spline", PSplineBlearnerFactory, degree = 3, knots = 10, penalty = 2, differences = 2)
# #cb$addBaselearner("dist_cat", "linear", PolynomialBlearnerFactory, degree = 1)
# cb$addLogger(IterationLogger, use.as.stopper = TRUE, logger.id = "bla", iter.max = 500)
# cb$train(NULL)
# cb
# cb$predict()
# cb$predict(cars)
# cb$train(10)
# cb2$train(200)
# gc()
# cb$train(100000)
# cb$bl.factory.list
# cb$model$getLoggerData()
# head(cb$model$getParameterMatrix()[[2]])
# cb$model$getParameterMatrix()[[1]]
# }
13 changes: 13 additions & 0 deletions other/module_test.R
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
library(Rcpp)
library(inline)

# Compile the example C++ class (other/module_test.cpp) with inline so that
# the Rcpp module it defines gets registered in the resulting shared library.
compiled.fun <- inline::cxxfunction(signature(), plugin = "Rcpp",
  include = readLines("other/module_test.cpp"))

# Pull the module out of the freshly compiled DLL and extract the class.
unif.module <- Module("unif_module", getDynLib(compiled.fun))
Uniform <- unif.module$Uniform

# Inspect the exposed fields, then draw from an instance on [0, 10].
Uniform@fields

u <- new(Uniform, 0, 10)
u$draw(10L)
33 changes: 33 additions & 0 deletions other/module_test.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
using namespace Rcpp;
// Minimal example class used to experiment with exposing C++ classes to R
// through an Rcpp module (registered in RCPP_MODULE below).
class Uniform {
  public:
    // Store the lower and upper bound of the uniform distribution.
    Uniform(double min_, double max_) :
      min(min_), max(max_) {}

    // Draw n samples from U(min, max). RNGScope keeps R's RNG state in
    // sync around the call to runif.
    NumericVector draw(int n) const {
      RNGScope scope;
      return runif(n, min, max);
    }

    double min, max;
    // Extra field with an in-class default, used to test exposing a
    // defaulted member to R.
    double degree = 1;
    // Rcpp::List mylist = Rcpp::List::create(Rcpp::Named("degree") = 1,
    //   Rcpp::Named("intercept") = TRUE);
};

// Free function exposed as a module method ("range"): width of [min, max].
double uniformRange(Uniform* w) {
  const double width = w->max - w->min;
  return width;
}

// Dummy helper returning a fixed value (module-export experiment; not
// currently registered in the module below).
double getDefaults () {
  const double default_value = 1.1;
  return default_value;
}

// Register the Uniform class with R. Fields become read/write members on the
// R side, methods become callable; "range" binds the free function
// uniformRange as if it were a member method.
RCPP_MODULE(unif_module) {
  class_<Uniform>("Uniform")
    .constructor<double,double>()
    .field("min", &Uniform::min)
    .field("max", &Uniform::max)
    .field("degree", &Uniform::degree)
    .method("draw", &Uniform::draw)
    .method("range", &uniformRange)
  ;
}
63 changes: 53 additions & 10 deletions tests/testthat/test_api.R
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ test_that("train works", {
expect_error(cboost$train(10))
expect_error(
cboost$addBaselearner(c("hp", "wt"), "spline", PSplineBlearner, degree = 3,
knots = 10, penalty = 2, differences = 2)
n.knots = 10, penalty = 2, differences = 2)
)

expect_silent(
Expand All @@ -25,7 +25,7 @@ test_that("train works", {
)
expect_silent(
cboost$addBaselearner("hp", "spline", PSplineBlearner, degree = 3,
knots = 10, penalty = 2, differences = 2)
n.knots = 10, penalty = 2, differences = 2)
)
expect_output(cboost$train(4000))
expect_output(cboost$print())
Expand Down Expand Up @@ -73,7 +73,7 @@ test_that("predict works", {
cboost$addBaselearner("mpg_cat", "linear", PolynomialBlearner, degree = 1,
intercept = FALSE)
cboost$addBaselearner("hp", "spline", PSplineBlearner, degree = 3,
knots = 10, penalty = 2, differences = 2)
n.knots = 10, penalty = 2, differences = 2)
})

expect_silent(cboost$train(200, trace = FALSE))
Expand All @@ -93,7 +93,7 @@ test_that("plot works", {
cboost$addBaselearner("mpg_cat", "linear", PolynomialBlearner, degree = 1,
intercept = TRUE)
cboost$addBaselearner("hp", "spline", PSplineBlearner, degree = 3,
knots = 10, penalty = 2, differences = 2)
n.knots = 10, penalty = 2, differences = 2)
cboost$addBaselearner(c("hp", "wt"), "quadratic", PolynomialBlearner, degree = 2,
intercept = TRUE)
})
Expand Down Expand Up @@ -122,7 +122,7 @@ test_that("multiple logger works", {
expect_silent({
cboost = Compboost$new(mtcars, "mpg", loss = QuadraticLoss$new())
cboost$addBaselearner("hp", "spline", PSplineBlearner, degree = 3,
knots = 10, penalty = 2, differences = 2)
n.knots = 10, penalty = 2, differences = 2)
cboost$addBaselearner(c("hp", "wt"), "quadratic", PolynomialBlearner, degree = 2,
intercept = TRUE)
})
Expand Down Expand Up @@ -167,7 +167,7 @@ test_that("custom base-learner works through api", {
}

expect_silent({
cboost$addBaselearner("hp", "custom", CustomBlearner, instanitate.fun = instantiateData,
cboost$addBaselearner("hp", "custom", CustomBlearner, instantiate.fun = instantiateData,
train.fun = trainFun, predict.fun = predictFun, param.fun = extractParameter)
})
expect_output({ cboost$train(100) })
Expand All @@ -190,8 +190,8 @@ test_that("custom cpp base-learner works through api", {
expect_silent({ cboost = Compboost$new(mtcars, "mpg", loss = QuadraticLoss$new()) })
expect_silent({ Rcpp::sourceCpp(code = getCustomCppExample(silent = TRUE)) })
expect_silent({
cboost$addBaselearner("hp", "custom", CustomCppBlearner, instanitate.ptr = dataFunSetter(),
train.ptr = trainFunSetter(), pred.ptr = predictFunSetter())
cboost$addBaselearner("hp", "custom", CustomCppBlearner, instantiate.ptr = dataFunSetter(),
train.ptr = trainFunSetter(), predict.ptr = predictFunSetter())
})
expect_output({ cboost$train(100) })

Expand Down Expand Up @@ -361,7 +361,7 @@ test_that("custom poisson family does the same as mboost", {
cboost$addBaselearner("Sepal.Width", "linear", PolynomialBlearner,
degree = 1, intercept = TRUE)
cboost$addBaselearner("Petal.Length", "spline", PSplineBlearner,
degree = 3, knots = 10, penalty = 2, differences = 2)
degree = 3, n.knots = 10, penalty = 2, differences = 2)
cboost$train(100, trace = FALSE)
})
mod = mboost(Sepal.Length ~ bols(Sepal.Width) + bbs(Petal.Length, differences = 2, lambda = 2,
Expand Down Expand Up @@ -392,7 +392,7 @@ test_that("quadratic loss does the same as mboost", {
cboost$addBaselearner("Sepal.Length", "linear", PolynomialBlearner,
degree = 1, intercept = TRUE)
cboost$addBaselearner("Petal.Length", "spline", PSplineBlearner,
degree = 3, knots = 10, penalty = 2, differences = 2)
degree = 3, n.knots = 10, penalty = 2, differences = 2)
cboost$train(100, trace = FALSE)
})
mod = mboost(Sepal.Width ~ bols(Sepal.Length) + bbs(Petal.Length, differences = 2, lambda = 2,
Expand All @@ -411,4 +411,47 @@ test_that("quadratic loss does the same as mboost", {
x = iris[idx, ]
expect_equal(cboost$predict(x), predict(mod, x))
}
})

# Regression test for the argument handlers (R/blearner_handler.R): unknown
# arguments passed to addBaselearner() must be reported via a warning rather
# than being dropped silently.
test_that("handler throws warnings", {
  expect_silent({
    cboost = Compboost$new(iris, "Sepal.Width", loss = QuadraticLoss$new())
  })

  # Misspelled argument for the polynomial base-learner.
  expect_warning(cboost$addBaselearner("Sepal.Length", "linear", PolynomialBlearner,
    degree = 1, false.intercept = TRUE))

  # Valid spline arguments plus one unused argument.
  expect_warning(cboost$addBaselearner("Petal.Length", "spline", PSplineBlearner,
    degree = 3, n.knots = 10, penalty = 2, differences = 2, i.am.not.used = NULL))

  # Minimal custom base-learner: ordinary least squares written as the four
  # required R functions.
  instantiateData = function (X) {
    return(X);
  }
  trainFun = function (y, X) {
    return(solve(t(X) %*% X) %*% t(X) %*% y)
  }
  predictFun = function (model, newdata) {
    return(newdata %*% model)
  }
  extractParameter = function (model) {
    return(model)
  }

  expect_warning(cboost$addBaselearner("Sepal.Length", "custom", CustomBlearner, instantiate.fun = instantiateData,
    train.fun = trainFun, predict.fun = predictFun, param.fun = extractParameter, i.am.not.used = NULL))

  # Compile the example C++ pointers first; the *FunSetter() factories used
  # below are defined by the sourced code.
  expect_silent({ Rcpp::sourceCpp(code = getCustomCppExample(silent = TRUE)) })
  expect_warning(cboost$addBaselearner("Sepal.Length", "custom", CustomCppBlearner, instantiate.ptr = dataFunSetter(),
    train.ptr = trainFunSetter(), predict.ptr = predictFunSetter(), i.am.not.used = NULL))
})


# The handlers must supply sensible defaults when no hyperparameters are
# given, so adding a base-learner without arguments has to work silently.
test_that("default values are used by handler", {

  expect_silent({
    model = Compboost$new(iris, "Sepal.Width", loss = QuadraticLoss$new())
  })
  expect_silent(model$addBaselearner("Sepal.Length", "linear", PolynomialBlearner))
  expect_silent(model$addBaselearner("Petal.Length", "spline", PSplineBlearner))

})

0 comments on commit 8940f9e

Please sign in to comment.