Merge pull request #614 from schistyakov/xgbTree-weights
Add weights support to xgbTree model
topepo committed Mar 15, 2017
2 parents 966a10f + 677bf22 commit dd36518
Showing 3 changed files with 35 additions and 3 deletions.
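In short, the patch lets per-observation case weights flow from caret's train() through to xgboost. A minimal usage sketch, mirroring the regression test below (the simulated data, weight values, and object names here are illustrative, not part of the commit):

library(caret)

set.seed(1)
dat <- twoClassSim(100, linearVars = 2)   # simulated two-class data (caret helper)
w   <- c(rep(0.1, 10), rep(1, 90))        # down-weight the first 10 rows

fit <- train(Class ~ ., data = dat,
             weights = w,                 # forwarded to xgboost as case weights
             method = "xgbTree",
             trControl = trainControl(method = "cv", number = 3))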
25 changes: 25 additions & 0 deletions RegressionTests/Code/xgbTree.R
@@ -19,6 +19,8 @@ testing <- twoClassSim(500, linearVars = 2)
trainX <- training[, -ncol(training)]
trainY <- training$Class

# case weights: down-weight the first 10 training rows relative to the remaining 90
training_weight <- c(rep(0.1, 10), rep(1, 90))

cctrl1 <- trainControl(method = "cv", number = 3, returnResamp = "all",
                       classProbs = TRUE,
                       summaryFunction = twoClassSummary)
@@ -45,11 +47,34 @@ test_class_cv_form <- train(Class ~ ., data = training,
                            preProc = c("center", "scale"),
                            tuneGrid = xgbGrid)

set.seed(849)
test_class_cv_model_weight <- train(trainX, trainY,
                                    weights = training_weight,
                                    method = "xgbTree",
                                    trControl = cctrl1,
                                    metric = "ROC",
                                    preProc = c("center", "scale"),
                                    tuneGrid = xgbGrid)

set.seed(849)
test_class_cv_form_weight <- train(Class ~ ., data = training,
                                   weights = training_weight,
                                   method = "xgbTree",
                                   trControl = cctrl1,
                                   metric = "ROC",
                                   preProc = c("center", "scale"),
                                   tuneGrid = xgbGrid)

test_class_pred <- predict(test_class_cv_model, testing[, -ncol(testing)])
test_class_prob <- predict(test_class_cv_model, testing[, -ncol(testing)], type = "prob")
test_class_pred_form <- predict(test_class_cv_form, testing[, -ncol(testing)])
test_class_prob_form <- predict(test_class_cv_form, testing[, -ncol(testing)], type = "prob")

test_class_pred_weight <- predict(test_class_cv_model_weight, testing[, -ncol(testing)])
test_class_prob_weight <- predict(test_class_cv_model_weight, testing[, -ncol(testing)], type = "prob")
test_class_pred_form_weight <- predict(test_class_cv_form_weight, testing[, -ncol(testing)])
test_class_prob_form_weight <- predict(test_class_cv_form_weight, testing[, -ncol(testing)], type = "prob")
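# Note: the weights affect fitting only, so the weighted models are scored
# with the same predict() calls as their unweighted counterparts.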

set.seed(849)
test_class_rand <- train(trainX, trainY,
                         method = "xgbTree",
8 changes: 7 additions & 1 deletion models/files/xgbTree.R
@@ -57,6 +57,8 @@ modelInfo <- list(label = "eXtreme Gradient Boosting",
if(length(lev) == 2) {
  y <- ifelse(y == lev[1], 1, 0)
  dat <- xgb.DMatrix(x, label = y, missing = NA)
  if (!is.null(wts))
    xgboost::setinfo(dat, 'weight', wts)
  out <- xgb.train(list(eta = param$eta,
                        max_depth = param$max_depth,
                        gamma = param$gamma,
@@ -70,6 +72,8 @@ modelInfo <- list(label = "eXtreme Gradient Boosting",
} else {
  y <- as.numeric(y) - 1
  dat <- xgb.DMatrix(x, label = y, missing = NA)
  if (!is.null(wts))
    xgboost::setinfo(dat, 'weight', wts)
  out <- xgb.train(list(eta = param$eta,
                        max_depth = param$max_depth,
                        gamma = param$gamma,
@@ -84,6 +88,8 @@ modelInfo <- list(label = "eXtreme Gradient Boosting",
  }
} else {
  dat <- xgb.DMatrix(as.matrix(x), label = y, missing = NA)
  if (!is.null(wts))
    xgboost::setinfo(dat, 'weight', wts)
  out <- xgb.train(list(eta = param$eta,
                        max_depth = param$max_depth,
                        gamma = param$gamma,
@@ -193,7 +199,7 @@ modelInfo <- list(label = "eXtreme Gradient Boosting",
imp
},
levels = function(x) x$obsLevels,
tags = c("Tree-Based Model", "Boosting", "Ensemble Model", "Implicit Feature Selection"),
tags = c("Tree-Based Model", "Boosting", "Ensemble Model", "Implicit Feature Selection", "Accepts Case Weights"),
sort = function(x) {
# This is a toss-up, but the # trees probably adds
# complexity faster than number of splits
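For reference, the fit code above attaches the weights to the xgb.DMatrix before training rather than passing them to xgb.train() directly. A standalone sketch of that mechanism, with made-up data and a deliberately small parameter list (the patch itself forwards caret's tuning parameters such as eta, max_depth, and gamma):

library(xgboost)

x <- matrix(rnorm(200), ncol = 2)   # made-up predictors
y <- rbinom(100, 1, 0.5)            # made-up 0/1 labels
w <- runif(100)                     # made-up case weights

dat <- xgb.DMatrix(x, label = y, missing = NA)
setinfo(dat, "weight", w)           # the call the patch adds (xgboost::setinfo)

out <- xgb.train(list(eta = 0.3, max_depth = 2,
                      objective = "binary:logistic"),
                 data = dat, nrounds = 10)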
5 changes: 3 additions & 2 deletions pkg/caret/inst/NEWS.Rd
@@ -7,13 +7,14 @@
\section{Changes in version 6.0-74}{
\itemize{
\item Monotone multi-layer perceptron neural network models from the \cpkg{monmlp} package were added \issue{489}
- \item A new resampling function (\code{groupKFold}) was added \issue(540)
- \item The bootstrap optimism estimate was added by Alexis Sarda \issue(544)
+ \item A new resampling function (\code{groupKFold}) was added \issue{540}
+ \item The bootstrap optimism estimate was added by Alexis Sarda \issue{544}
\item Bugs in the \code{glm}, \code{glm.nb}, and \code{lm} variable importance methods that occur when a single variable is in the model were fixed \issue{543}
\item A bug in \code{filterVarImp} was fixed where the ROC curve AUC could be much less than 0.50 because the directionality of the predictor was not taken into account. The fix artificially increases the importance of some non-informative predictors, but without it the AUC for an important predictor could be reported as 0.20 instead of 0.80. \issue{565}
\item \code{multiClassSummary} now reports the average F score \issue{566}
\item The \code{RMSE} and \code{R2} functions are now (re)exposed to users \issue{563}
\item A \cpkg{caret} bug was discovered by Jiebiao Wang where \code{glmboost}, \code{gamboost}, and \code{blackboost} models incorrectly reported the class probabilities \issue{560}
\item Support for training data weights was added to the \code{xgbTree} model by schistyakov
\item Regularised logistic regression through Liblinear (\code{LiblineaR::LiblineaR}) using L1 or L2 regularisation was added
}
}
