From df9a9188b397263b8bab1fef6e9cdada665dd824 Mon Sep 17 00:00:00 2001
From: topepo
Date: Sun, 1 Dec 2019 10:30:46 -0500
Subject: [PATCH 1/2] added skips for glmnet when < 3.6.0

---
 tests/testthat/helper-objects.R           | 2 ++
 tests/testthat/test_linear_reg_glmnet.R   | 6 ++++++
 tests/testthat/test_logistic_reg_glmnet.R | 8 ++++++++
 tests/testthat/test_multinom_reg_glmnet.R | 6 ++++++
 tests/testthat/test_predict_formats.R     | 3 +++
 5 files changed, 25 insertions(+)

diff --git a/tests/testthat/helper-objects.R b/tests/testthat/helper-objects.R
index 615b665a3..fed44e9ff 100644
--- a/tests/testthat/helper-objects.R
+++ b/tests/testthat/helper-objects.R
@@ -1,3 +1,5 @@
 ctrl <- control_parsnip(verbosity = 1, catch = FALSE)
 caught_ctrl <- control_parsnip(verbosity = 1, catch = TRUE)
 quiet_ctrl <- control_parsnip(verbosity = 0, catch = TRUE)
+
+run_glmnet <- utils::compareVersion('3.6.0', as.character(getRversion())) < 0
diff --git a/tests/testthat/test_linear_reg_glmnet.R b/tests/testthat/test_linear_reg_glmnet.R
index cc21796e9..e88664c47 100644
--- a/tests/testthat/test_linear_reg_glmnet.R
+++ b/tests/testthat/test_linear_reg_glmnet.R
@@ -20,6 +20,7 @@ no_lambda <- linear_reg(mixture = .3) %>%
 
 test_that('glmnet execution', {
   skip_if_not_installed("glmnet")
+  skip_if(run_glmnet)
 
   expect_error(
     res <- fit_xy(
@@ -56,6 +57,7 @@ test_that('glmnet execution', {
 
 test_that('glmnet prediction, single lambda', {
   skip_if_not_installed("glmnet")
+  skip_if(run_glmnet)
 
   res_xy <- fit_xy(
     iris_basic,
@@ -86,6 +88,7 @@ test_that('glmnet prediction, single lambda', {
 
 test_that('glmnet prediction, multiple lambda', {
   skip_if_not_installed("glmnet")
+  skip_if(run_glmnet)
 
   lams <- c(.01, 0.1)
 
@@ -182,6 +185,7 @@ test_that('glmnet prediction, multiple lambda', {
 
 test_that('glmnet prediction, all lambda', {
   skip_if_not_installed("glmnet")
+  skip_if(run_glmnet)
 
   iris_all <- linear_reg(mixture = .3) %>%
     set_engine("glmnet")
@@ -230,6 +234,7 @@ test_that('glmnet prediction, all lambda', {
 
 test_that('submodel prediction', {
   skip_if_not_installed("glmnet")
+  skip_if(run_glmnet)
 
   reg_fit <-
     linear_reg() %>%
@@ -272,6 +277,7 @@ test_that('submodel prediction', {
 
 test_that('error traps', {
   skip_if_not_installed("glmnet")
+  skip_if(run_glmnet)
 
   expect_error(
     linear_reg() %>%
diff --git a/tests/testthat/test_logistic_reg_glmnet.R b/tests/testthat/test_logistic_reg_glmnet.R
index 9e4168db0..6a4b36f9c 100644
--- a/tests/testthat/test_logistic_reg_glmnet.R
+++ b/tests/testthat/test_logistic_reg_glmnet.R
@@ -21,6 +21,7 @@ lc_basic <- logistic_reg() %>% set_engine("glmnet")
 
 test_that('glmnet execution', {
   skip_if_not_installed("glmnet")
+  skip_if(run_glmnet)
 
   expect_error(
     res <- fit_xy(
@@ -48,6 +49,7 @@ test_that('glmnet execution', {
 
 test_that('glmnet prediction, one lambda', {
   skip_if_not_installed("glmnet")
+  skip_if(run_glmnet)
 
   xy_fit <- fit_xy(
     logistic_reg(penalty = 0.1) %>% set_engine("glmnet"),
@@ -95,6 +97,7 @@ test_that('glmnet prediction, one lambda', {
 
 test_that('glmnet prediction, mulitiple lambda', {
   skip_if_not_installed("glmnet")
+  skip_if(run_glmnet)
 
   lams <- c(0.01, 0.1)
 
@@ -158,6 +161,7 @@ test_that('glmnet prediction, mulitiple lambda', {
 
 test_that('glmnet prediction, no lambda', {
   skip_if_not_installed("glmnet")
+  skip_if(run_glmnet)
 
   xy_fit <- fit_xy(
     logistic_reg() %>% set_engine("glmnet", nlambda = 11),
@@ -217,6 +221,7 @@ test_that('glmnet prediction, no lambda', {
 
 test_that('glmnet probabilities, one lambda', {
   skip_if_not_installed("glmnet")
+  skip_if(run_glmnet)
 
   xy_fit <- fit_xy(
     logistic_reg(penalty = 0.1) %>% set_engine("glmnet"),
@@ -265,6 +270,7 @@ test_that('glmnet probabilities, one lambda', {
 
 test_that('glmnet probabilities, mulitiple lambda', {
   skip_if_not_installed("glmnet")
+  skip_if(run_glmnet)
 
   lams <- c(0.01, 0.1)
 
@@ -329,6 +335,7 @@ test_that('glmnet probabilities, mulitiple lambda', {
 
 test_that('glmnet probabilities, no lambda', {
   skip_if_not_installed("glmnet")
+  skip_if(run_glmnet)
 
   xy_fit <- fit_xy(
     logistic_reg() %>% set_engine("glmnet"),
@@ -389,6 +396,7 @@ test_that('glmnet probabilities, no lambda', {
 
 test_that('submodel prediction', {
   skip_if_not_installed("glmnet")
+  skip_if(run_glmnet)
 
   vars <- c("female", "tenure", "total_charges", "phone_service", "monthly_charges")
   class_fit <-
diff --git a/tests/testthat/test_multinom_reg_glmnet.R b/tests/testthat/test_multinom_reg_glmnet.R
index ef5f3444f..240373d76 100644
--- a/tests/testthat/test_multinom_reg_glmnet.R
+++ b/tests/testthat/test_multinom_reg_glmnet.R
@@ -15,6 +15,7 @@ rows <- c(1, 51, 101)
 
 test_that('glmnet execution', {
   skip_if_not_installed("glmnet")
+  skip_if(run_glmnet)
 
   expect_error(
     res <- fit_xy(
@@ -43,6 +44,7 @@ test_that('glmnet execution', {
 
 test_that('glmnet prediction, one lambda', {
   skip_if_not_installed("glmnet")
+  skip_if(run_glmnet)
 
   xy_fit <- fit_xy(
     multinom_reg(penalty = 0.1) %>% set_engine("glmnet"),
@@ -85,6 +87,7 @@ test_that('glmnet prediction, one lambda', {
 
 test_that('glmnet probabilities, mulitiple lambda', {
   skip_if_not_installed("glmnet")
+  skip_if(run_glmnet)
 
   lams <- c(0.01, 0.1)
 
@@ -149,6 +152,9 @@ test_that('glmnet probabilities, mulitiple lambda', {
 })
 
 test_that("class predictions are factors with all levels", {
+  skip_if_not_installed("glmnet")
+  skip_if(run_glmnet)
+
   basic <- multinom_reg() %>% set_engine("glmnet") %>% fit(Species ~ ., data = iris)
   nd <- iris[iris$Species == "setosa", ]
   yhat <- predict(basic, new_data = nd, penalty = .1)
diff --git a/tests/testthat/test_predict_formats.R b/tests/testthat/test_predict_formats.R
index eb83643cf..442650486 100644
--- a/tests/testthat/test_predict_formats.R
+++ b/tests/testthat/test_predict_formats.R
@@ -62,6 +62,8 @@ test_that('non-standard levels', {
 
 
 test_that('non-factor classification', {
+  skip_if(run_glmnet)
+
   expect_error(
     logistic_reg() %>%
       set_engine("glm") %>%
@@ -72,6 +74,7 @@ test_that('non-factor classification', {
       set_engine("glm") %>%
       fit(Species ~ ., data = iris %>% mutate(Species = ifelse(Species == "setosa", 1, 0)))
   )
+
   expect_error(
     multinom_reg() %>%
       set_engine("glmnet") %>%

From e71574e67ddc115bb50f690cba63b86c1ce8efa8 Mon Sep 17 00:00:00 2001
From: topepo
Date: Sun, 1 Dec 2019 10:47:24 -0500
Subject: [PATCH 2/2] remove glmnet form suggests

---
 DESCRIPTION                |  3 +--
 man/C5.0_train.Rd          |  3 +--
 man/boost_tree.Rd          | 31 ++++++++++++++++++++++++-------
 man/check_times.Rd         |  2 +-
 man/decision_tree.Rd       | 22 ++++++++++++++++------
 man/fit.Rd                 |  6 ++----
 man/keras_mlp.Rd           | 14 +++++++++++---
 man/linear_reg.Rd          | 10 ++++++++--
 man/logistic_reg.Rd        | 10 ++++++++--
 man/mars.Rd                | 21 ++++++++++++++++-----
 man/mlp.Rd                 | 26 ++++++++++++++++++++------
 man/model_spec.Rd          |  2 ++
 man/multi_predict.Rd       | 21 +++++++--------------
 man/multinom_reg.Rd        | 10 ++++++++--
 man/nearest_neighbor.Rd    |  8 ++++++--
 man/predict.model_fit.Rd   |  3 +--
 man/rand_forest.Rd         | 20 +++++++++++++-------
 man/rpart_train.Rd         | 11 +++++++++--
 man/surv_reg.Rd            |  3 +--
 man/svm_poly.Rd            | 24 ++++++++++++++++++------
 man/svm_rbf.Rd             | 14 ++++++++++----
 man/type_sum.model_spec.Rd |  6 +++---
 man/xgb_train.Rd           | 15 ++++++++++++---
 23 files changed, 198 insertions(+), 87 deletions(-)

diff --git a/DESCRIPTION b/DESCRIPTION
index 9beff4c57..8c9e46b8f 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -31,7 +31,7 @@ Imports:
     prettyunits,
    vctrs (>= 0.2.0)
 Roxygen: list(markdown = TRUE)
-RoxygenNote: 6.1.99.9001
+RoxygenNote: 7.0.1.9000
 Suggests:
    testthat,
    knitr,
@@ -43,7 +43,6 @@ Suggests:
    C50,
    sparklyr (>= 1.0.0),
    earth,
-    glmnet,
    kernlab,
    kknn,
    randomForest,
diff --git a/man/C5.0_train.Rd b/man/C5.0_train.Rd
index 9a3e2f0d1..8b2af1536 100644
--- a/man/C5.0_train.Rd
+++ b/man/C5.0_train.Rd
@@ -4,8 +4,7 @@
 \alias{C5.0_train}
 \title{Boosted trees via C5.0}
 \usage{
-C5.0_train(x, y, weights = NULL, trials = 15, minCases = 2,
-  sample = 0, ...)
+C5.0_train(x, y, weights = NULL, trials = 15, minCases = 2, sample = 0, ...)
 }
 \arguments{
 \item{x}{A data frame or matrix of predictors.}
diff --git a/man/boost_tree.Rd b/man/boost_tree.Rd
index 5c2d553b8..34012fd39 100644
--- a/man/boost_tree.Rd
+++ b/man/boost_tree.Rd
@@ -5,13 +5,30 @@
 \alias{update.boost_tree}
 \title{General Interface for Boosted Trees}
 \usage{
-boost_tree(mode = "unknown", mtry = NULL, trees = NULL,
-  min_n = NULL, tree_depth = NULL, learn_rate = NULL,
-  loss_reduction = NULL, sample_size = NULL)
-
-\method{update}{boost_tree}(object, parameters = NULL, mtry = NULL,
-  trees = NULL, min_n = NULL, tree_depth = NULL, learn_rate = NULL,
-  loss_reduction = NULL, sample_size = NULL, fresh = FALSE, ...)
+boost_tree(
+  mode = "unknown",
+  mtry = NULL,
+  trees = NULL,
+  min_n = NULL,
+  tree_depth = NULL,
+  learn_rate = NULL,
+  loss_reduction = NULL,
+  sample_size = NULL
+)
+
+\method{update}{boost_tree}(
+  object,
+  parameters = NULL,
+  mtry = NULL,
+  trees = NULL,
+  min_n = NULL,
+  tree_depth = NULL,
+  learn_rate = NULL,
+  loss_reduction = NULL,
+  sample_size = NULL,
+  fresh = FALSE,
+  ...
+)
 }
 \arguments{
 \item{mode}{A single character string for the type of model.
diff --git a/man/check_times.Rd b/man/check_times.Rd
index e8a306e1c..07828295e 100644
--- a/man/check_times.Rd
+++ b/man/check_times.Rd
@@ -50,7 +50,7 @@ directory.
 directory.
 \item \code{testthat_size}: The size on disk of files in the
 \code{testthat} directory.
-\item \code{check_time}: The time (in seconds) to run \code{R CMD check}
+\item \code{check_time}: The time (in seconds) to run \verb{R CMD check}
 using the "r-devel-windows-ix86+x86_64` flavor.
 \item \code{status}: An indicator for whether the tests completed.
 }
diff --git a/man/decision_tree.Rd b/man/decision_tree.Rd
index 68d810b28..60205d115 100644
--- a/man/decision_tree.Rd
+++ b/man/decision_tree.Rd
@@ -5,12 +5,22 @@
 \alias{update.decision_tree}
 \title{General Interface for Decision Tree Models}
 \usage{
-decision_tree(mode = "unknown", cost_complexity = NULL,
-  tree_depth = NULL, min_n = NULL)
-
-\method{update}{decision_tree}(object, parameters = NULL,
-  cost_complexity = NULL, tree_depth = NULL, min_n = NULL,
-  fresh = FALSE, ...)
+decision_tree(
+  mode = "unknown",
+  cost_complexity = NULL,
+  tree_depth = NULL,
+  min_n = NULL
+)
+
+\method{update}{decision_tree}(
+  object,
+  parameters = NULL,
+  cost_complexity = NULL,
+  tree_depth = NULL,
+  min_n = NULL,
+  fresh = FALSE,
+  ...
+)
 }
 \arguments{
 \item{mode}{A single character string for the type of model.
diff --git a/man/fit.Rd b/man/fit.Rd
index 4f4f8dda3..2c284e8cb 100644
--- a/man/fit.Rd
+++ b/man/fit.Rd
@@ -5,11 +5,9 @@
 \alias{fit_xy.model_spec}
 \title{Fit a Model Specification to a Dataset}
 \usage{
-\method{fit}{model_spec}(object, formula = NULL, data = NULL,
-  control = control_parsnip(), ...)
+\method{fit}{model_spec}(object, formula = NULL, data = NULL, control = control_parsnip(), ...)
 
-\method{fit_xy}{model_spec}(object, x = NULL, y = NULL,
-  control = control_parsnip(), ...)
+\method{fit_xy}{model_spec}(object, x = NULL, y = NULL, control = control_parsnip(), ...)
 }
 \arguments{
 \item{object}{An object of class \code{model_spec} that has a chosen engine
diff --git a/man/keras_mlp.Rd b/man/keras_mlp.Rd
index 4972655aa..f0d516401 100644
--- a/man/keras_mlp.Rd
+++ b/man/keras_mlp.Rd
@@ -4,9 +4,17 @@
 \alias{keras_mlp}
 \title{Simple interface to MLP models via keras}
 \usage{
-keras_mlp(x, y, hidden_units = 5, decay = 0, dropout = 0,
-  epochs = 20, act = "softmax", seeds = sample.int(10^5, size = 3),
-  ...)
+keras_mlp(
+  x,
+  y,
+  hidden_units = 5,
+  decay = 0,
+  dropout = 0,
+  epochs = 20,
+  act = "softmax",
+  seeds = sample.int(10^5, size = 3),
+  ...
+)
 }
 \arguments{
 \item{x}{A data frame or matrix of predictors}
diff --git a/man/linear_reg.Rd b/man/linear_reg.Rd
index 909ebbbef..53ee53983 100644
--- a/man/linear_reg.Rd
+++ b/man/linear_reg.Rd
@@ -7,8 +7,14 @@
 \usage{
 linear_reg(mode = "regression", penalty = NULL, mixture = NULL)
 
-\method{update}{linear_reg}(object, parameters = NULL, penalty = NULL,
-  mixture = NULL, fresh = FALSE, ...)
+\method{update}{linear_reg}(
+  object,
+  parameters = NULL,
+  penalty = NULL,
+  mixture = NULL,
+  fresh = FALSE,
+  ...
+)
 }
 \arguments{
 \item{mode}{A single character string for the type of model.
diff --git a/man/logistic_reg.Rd b/man/logistic_reg.Rd
index 7c42f4c4d..ec63eb800 100644
--- a/man/logistic_reg.Rd
+++ b/man/logistic_reg.Rd
@@ -7,8 +7,14 @@
 \usage{
 logistic_reg(mode = "classification", penalty = NULL, mixture = NULL)
 
-\method{update}{logistic_reg}(object, parameters = NULL,
-  penalty = NULL, mixture = NULL, fresh = FALSE, ...)
+\method{update}{logistic_reg}(
+  object,
+  parameters = NULL,
+  penalty = NULL,
+  mixture = NULL,
+  fresh = FALSE,
+  ...
+)
 }
 \arguments{
 \item{mode}{A single character string for the type of model.
diff --git a/man/mars.Rd b/man/mars.Rd
index e119d33a5..5d841f60d 100644
--- a/man/mars.Rd
+++ b/man/mars.Rd
@@ -5,11 +5,22 @@
 \alias{update.mars}
 \title{General Interface for MARS}
 \usage{
-mars(mode = "unknown", num_terms = NULL, prod_degree = NULL,
-  prune_method = NULL)
-
-\method{update}{mars}(object, parameters = NULL, num_terms = NULL,
-  prod_degree = NULL, prune_method = NULL, fresh = FALSE, ...)
+mars(
+  mode = "unknown",
+  num_terms = NULL,
+  prod_degree = NULL,
+  prune_method = NULL
+)
+
+\method{update}{mars}(
+  object,
+  parameters = NULL,
+  num_terms = NULL,
+  prod_degree = NULL,
+  prune_method = NULL,
+  fresh = FALSE,
+  ...
+)
 }
 \arguments{
 \item{mode}{A single character string for the type of model.
diff --git a/man/mlp.Rd b/man/mlp.Rd
index 7cdc4e692..11cd3fca8 100644
--- a/man/mlp.Rd
+++ b/man/mlp.Rd
@@ -5,12 +5,26 @@
 \alias{update.mlp}
 \title{General Interface for Single Layer Neural Network}
 \usage{
-mlp(mode = "unknown", hidden_units = NULL, penalty = NULL,
-  dropout = NULL, epochs = NULL, activation = NULL)
-
-\method{update}{mlp}(object, parameters = NULL, hidden_units = NULL,
-  penalty = NULL, dropout = NULL, epochs = NULL, activation = NULL,
-  fresh = FALSE, ...)
+mlp(
+  mode = "unknown",
+  hidden_units = NULL,
+  penalty = NULL,
+  dropout = NULL,
+  epochs = NULL,
+  activation = NULL
+)
+
+\method{update}{mlp}(
+  object,
+  parameters = NULL,
+  hidden_units = NULL,
+  penalty = NULL,
+  dropout = NULL,
+  epochs = NULL,
+  activation = NULL,
+  fresh = FALSE,
+  ...
+)
 }
 \arguments{
 \item{mode}{A single character string for the type of model.
diff --git a/man/model_spec.Rd b/man/model_spec.Rd
index 8202721fc..54e19ac94 100644
--- a/man/model_spec.Rd
+++ b/man/model_spec.Rd
@@ -21,6 +21,7 @@ can \code{varying()}. If left to their defaults (\code{NULL}), the
 arguments will use the underlying model functions default value.
 As discussed below, the arguments in \code{args}
 are captured as quosures and are not immediately executed.
+\itemize{
 \item \code{...}: Optional model-function-specific parameters. As
 with \code{args}, these will be quosures and can be \code{varying()}.
 
@@ -39,6 +40,7 @@ type.
 
 This class and structure is the basis for how \pkg{parsnip}
 stores model objects prior to seeing the data.
 }
+}
 \section{Argument Details}{
 
diff --git a/man/multi_predict.Rd b/man/multi_predict.Rd
index d3ad7ac0f..38ee25bd0 100644
--- a/man/multi_predict.Rd
+++ b/man/multi_predict.Rd
@@ -18,26 +18,19 @@ multi_predict(object, ...)
 
 \method{multi_predict}{default}(object, ...)
 
-\method{multi_predict}{_xgb.Booster}(object, new_data, type = NULL,
-  trees = NULL, ...)
+\method{multi_predict}{`_xgb.Booster`}(object, new_data, type = NULL, trees = NULL, ...)
 
-\method{multi_predict}{_C5.0}(object, new_data, type = NULL,
-  trees = NULL, ...)
+\method{multi_predict}{`_C5.0`}(object, new_data, type = NULL, trees = NULL, ...)
 
-\method{multi_predict}{_elnet}(object, new_data, type = NULL,
-  penalty = NULL, ...)
+\method{multi_predict}{`_elnet`}(object, new_data, type = NULL, penalty = NULL, ...)
 
-\method{multi_predict}{_lognet}(object, new_data, type = NULL,
-  penalty = NULL, ...)
+\method{multi_predict}{`_lognet`}(object, new_data, type = NULL, penalty = NULL, ...)
 
-\method{multi_predict}{_earth}(object, new_data, type = NULL,
-  num_terms = NULL, ...)
+\method{multi_predict}{`_earth`}(object, new_data, type = NULL, num_terms = NULL, ...)
 
-\method{multi_predict}{_multnet}(object, new_data, type = NULL,
-  penalty = NULL, ...)
+\method{multi_predict}{`_multnet`}(object, new_data, type = NULL, penalty = NULL, ...)
 
-\method{multi_predict}{_train.kknn}(object, new_data, type = NULL,
-  neighbors = NULL, ...)
+\method{multi_predict}{`_train.kknn`}(object, new_data, type = NULL, neighbors = NULL, ...)
 }
 \arguments{
 \item{object}{A \code{model_fit} object.}
diff --git a/man/multinom_reg.Rd b/man/multinom_reg.Rd
index ed897f1aa..00cf5a622 100644
--- a/man/multinom_reg.Rd
+++ b/man/multinom_reg.Rd
@@ -7,8 +7,14 @@
 \usage{
 multinom_reg(mode = "classification", penalty = NULL, mixture = NULL)
 
-\method{update}{multinom_reg}(object, parameters = NULL,
-  penalty = NULL, mixture = NULL, fresh = FALSE, ...)
+\method{update}{multinom_reg}(
+  object,
+  parameters = NULL,
+  penalty = NULL,
+  mixture = NULL,
+  fresh = FALSE,
+  ...
+)
 }
 \arguments{
 \item{mode}{A single character string for the type of model.
diff --git a/man/nearest_neighbor.Rd b/man/nearest_neighbor.Rd
index 0884f837d..3973b146e 100644
--- a/man/nearest_neighbor.Rd
+++ b/man/nearest_neighbor.Rd
@@ -4,8 +4,12 @@
 \alias{nearest_neighbor}
 \title{General Interface for K-Nearest Neighbor Models}
 \usage{
-nearest_neighbor(mode = "unknown", neighbors = NULL,
-  weight_func = NULL, dist_power = NULL)
+nearest_neighbor(
+  mode = "unknown",
+  neighbors = NULL,
+  weight_func = NULL,
+  dist_power = NULL
+)
 }
 \arguments{
 \item{mode}{A single character string for the type of model.
diff --git a/man/predict.model_fit.Rd b/man/predict.model_fit.Rd
index 9999429d8..747cb3cfc 100644
--- a/man/predict.model_fit.Rd
+++ b/man/predict.model_fit.Rd
@@ -4,8 +4,7 @@
 \alias{predict.model_fit}
 \title{Model predictions}
 \usage{
-\method{predict}{model_fit}(object, new_data, type = NULL,
-  opts = list(), ...)
+\method{predict}{model_fit}(object, new_data, type = NULL, opts = list(), ...)
 }
 \arguments{
 \item{object}{An object of class \code{model_fit}}
diff --git a/man/rand_forest.Rd b/man/rand_forest.Rd
index 33584e137..4f862458c 100644
--- a/man/rand_forest.Rd
+++ b/man/rand_forest.Rd
@@ -5,11 +5,17 @@
 \alias{update.rand_forest}
 \title{General Interface for Random Forest Models}
 \usage{
-rand_forest(mode = "unknown", mtry = NULL, trees = NULL,
-  min_n = NULL)
-
-\method{update}{rand_forest}(object, parameters = NULL, mtry = NULL,
-  trees = NULL, min_n = NULL, fresh = FALSE, ...)
+rand_forest(mode = "unknown", mtry = NULL, trees = NULL, min_n = NULL)
+
+\method{update}{rand_forest}(
+  object,
+  parameters = NULL,
+  mtry = NULL,
+  trees = NULL,
+  min_n = NULL,
+  fresh = FALSE,
+  ...
+)
 }
 \arguments{
 \item{mode}{A single character string for the type of model.
@@ -110,9 +116,9 @@ model, the template of the fit calls are::
 \Sexpr[results=rd]{parsnip:::show_fit(parsnip:::rand_forest(mode = "regression"), "spark")}
 
 For \pkg{ranger} confidence intervals, the intervals are
-constructed using the form \code{estimate +/- z * std_error}. For
+constructed using the form \verb{estimate +/- z * std_error}. For
 classification probabilities, these values can fall outside of
-\code{[0, 1]} and will be coerced to be in this range.
+\verb{[0, 1]} and will be coerced to be in this range.
 }
 \examples{
 
diff --git a/man/rpart_train.Rd b/man/rpart_train.Rd
index 6ce88fb66..a55440b08 100644
--- a/man/rpart_train.Rd
+++ b/man/rpart_train.Rd
@@ -4,8 +4,15 @@
 \alias{rpart_train}
 \title{Decision trees via rpart}
 \usage{
-rpart_train(formula, data, weights = NULL, cp = 0.01, minsplit = 20,
-  maxdepth = 30, ...)
+rpart_train(
+  formula,
+  data,
+  weights = NULL,
+  cp = 0.01,
+  minsplit = 20,
+  maxdepth = 30,
+  ...
+)
 }
 \arguments{
 \item{formula}{A model formula.}
diff --git a/man/surv_reg.Rd b/man/surv_reg.Rd
index 6b8084923..61396a799 100644
--- a/man/surv_reg.Rd
+++ b/man/surv_reg.Rd
@@ -7,8 +7,7 @@
 \usage{
 surv_reg(mode = "regression", dist = NULL)
 
-\method{update}{surv_reg}(object, parameters = NULL, dist = NULL,
-  fresh = FALSE, ...)
+\method{update}{surv_reg}(object, parameters = NULL, dist = NULL, fresh = FALSE, ...)
 }
 \arguments{
 \item{mode}{A single character string for the type of model.
diff --git a/man/svm_poly.Rd b/man/svm_poly.Rd
index cf9bcb955..5ecfe1af6 100644
--- a/man/svm_poly.Rd
+++ b/man/svm_poly.Rd
@@ -5,12 +5,24 @@
 \alias{update.svm_poly}
 \title{General interface for polynomial support vector machines}
 \usage{
-svm_poly(mode = "unknown", cost = NULL, degree = NULL,
-  scale_factor = NULL, margin = NULL)
-
-\method{update}{svm_poly}(object, parameters = NULL, cost = NULL,
-  degree = NULL, scale_factor = NULL, margin = NULL, fresh = FALSE,
-  ...)
+svm_poly(
+  mode = "unknown",
+  cost = NULL,
+  degree = NULL,
+  scale_factor = NULL,
+  margin = NULL
+)
+
+\method{update}{svm_poly}(
+  object,
+  parameters = NULL,
+  cost = NULL,
+  degree = NULL,
+  scale_factor = NULL,
+  margin = NULL,
+  fresh = FALSE,
+  ...
+)
 }
 \arguments{
 \item{mode}{A single character string for the type of model.
diff --git a/man/svm_rbf.Rd b/man/svm_rbf.Rd
index b619ec52e..157f6625e 100644
--- a/man/svm_rbf.Rd
+++ b/man/svm_rbf.Rd
@@ -5,11 +5,17 @@
 \alias{update.svm_rbf}
 \title{General interface for radial basis function support vector machines}
 \usage{
-svm_rbf(mode = "unknown", cost = NULL, rbf_sigma = NULL,
-  margin = NULL)
+svm_rbf(mode = "unknown", cost = NULL, rbf_sigma = NULL, margin = NULL)
 
-\method{update}{svm_rbf}(object, parameters = NULL, cost = NULL,
-  rbf_sigma = NULL, margin = NULL, fresh = FALSE, ...)
+\method{update}{svm_rbf}(
+  object,
+  parameters = NULL,
+  cost = NULL,
+  rbf_sigma = NULL,
+  margin = NULL,
+  fresh = FALSE,
+  ...
+)
 }
 \arguments{
 \item{mode}{A single character string for the type of model.
diff --git a/man/type_sum.model_spec.Rd b/man/type_sum.model_spec.Rd
index 24571a2ab..d8c176259 100644
--- a/man/type_sum.model_spec.Rd
+++ b/man/type_sum.model_spec.Rd
@@ -20,12 +20,12 @@ A character value.
 columns.
 }
 \details{
-For \code{model_spec} objects, the summary is "\code{spec[?]}"
-or "\code{spec[+]}". The former indicates that either the model
+For \code{model_spec} objects, the summary is "\verb{spec[?]}"
+or "\verb{spec[+]}". The former indicates that either the model
 mode has not been declared or that the specification has
 \code{varying()} parameters. Otherwise, the latter is shown.
 
-For fitted models, either "\code{fit[x]}" or "\code{fit[+]}" are used
+For fitted models, either "\code{fit[x]}" or "\verb{fit[+]}" are used
 where the "x" implies that the model fit failed in some way.
 }
 \keyword{internal}
diff --git a/man/xgb_train.Rd b/man/xgb_train.Rd
index 5eeab7841..87a3e4163 100644
--- a/man/xgb_train.Rd
+++ b/man/xgb_train.Rd
@@ -4,9 +4,18 @@
 \alias{xgb_train}
 \title{Boosted trees via xgboost}
 \usage{
-xgb_train(x, y, max_depth = 6, nrounds = 15, eta = 0.3,
-  colsample_bytree = 1, min_child_weight = 1, gamma = 0,
-  subsample = 1, ...)
+xgb_train(
+  x,
+  y,
+  max_depth = 6,
+  nrounds = 15,
+  eta = 0.3,
+  colsample_bytree = 1,
+  min_child_weight = 1,
+  gamma = 0,
+  subsample = 1,
+  ...
+)
 }
 \arguments{
 \item{x}{A data frame or matrix of predictors}
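For context, a minimal standalone sketch of how the version gate added in tests/testthat/helper-objects.R behaves. The run_glmnet line is taken from the first patch; everything around it is illustrative only and assumes just base R plus testthat.

# utils::compareVersion(a, b) returns 1 when version a is later, -1 when
# version b is later, and 0 when the two are equal.
utils::compareVersion("3.6.0", "3.5.3")   # returns  1
utils::compareVersion("3.6.0", "4.0.0")   # returns -1

# With the comparison written as `< 0`, the flag is TRUE only when the
# running R version is newer than 3.6.0.
run_glmnet <- utils::compareVersion("3.6.0", as.character(getRversion())) < 0

# testthat::skip_if() skips the enclosing test_that() block whenever its
# condition is TRUE, which is how the skip_if(run_glmnet) calls added in
# the test files take effect.
testthat::test_that("illustration of the glmnet version gate", {
  testthat::skip_if(run_glmnet, "glmnet tests are gated on the R version")
  testthat::expect_true(TRUE)
})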