diff --git a/man/details_auto_ml_h2o.Rd b/man/details_auto_ml_h2o.Rd
index 4c21dad53..b567c3ef4 100644
--- a/man/details_auto_ml_h2o.Rd
+++ b/man/details_auto_ml_h2o.Rd
@@ -70,8 +70,8 @@ performance assessment and potential early stopping.
Factor/categorical predictors need to be converted to numeric values
(e.g., dummy or indicator variables) for this engine. When using the
-formula method via \code{\link[=fit.model_spec]{fit()}}, parsnip
-will convert factor columns to indicators.
+formula method via \code{\link[=fit.model_spec]{fit()}}, parsnip will
+convert factor columns to indicators.
}
\subsection{Initializing h2o}{
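For reference, a minimal sketch of the formula interface described in the hunk above (not part of the generated docs; it assumes the agua and h2o packages are installed, that a local h2o cluster can be started, and uses `iris` purely as a stand-in dataset with a factor column):

```r
library(parsnip)
library(agua)    # registers the h2o engine for auto_ml()

h2o::h2o.init()  # start a local h2o cluster

spec <- auto_ml() %>%
  set_engine("h2o") %>%
  set_mode("regression")

# fit() with a formula: parsnip converts the factor column `Species`
# to indicator columns before the data are passed to h2o
fit(spec, Sepal.Length ~ ., data = iris)
```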
diff --git a/man/details_boost_tree_h2o.Rd b/man/details_boost_tree_h2o.Rd
index 8c4e9c214..3aba6bb5b 100644
--- a/man/details_boost_tree_h2o.Rd
+++ b/man/details_boost_tree_h2o.Rd
@@ -72,6 +72,7 @@ The \strong{agua} extension package is required to fit this model.
##
## Model fit template:
## agua::h2o_train_xgboost(x = missing_arg(), y = missing_arg(),
+## weights = missing_arg(), validation_frame = missing_arg(),
## col_sample_rate = integer(), ntrees = integer(), min_rows = integer(),
## max_depth = integer(), learn_rate = numeric(), min_split_improvement = numeric(),
## stopping_rounds = integer())
@@ -106,6 +107,7 @@ The \strong{agua} extension package is required to fit this model.
##
## Model fit template:
## agua::h2o_train_xgboost(x = missing_arg(), y = missing_arg(),
+## weights = missing_arg(), validation_frame = missing_arg(),
## col_sample_rate = integer(), ntrees = integer(), min_rows = integer(),
## max_depth = integer(), learn_rate = numeric(), min_split_improvement = numeric(),
## stopping_rounds = integer())
diff --git a/man/details_linear_reg_gls.Rd b/man/details_linear_reg_gls.Rd
index e6135b979..1f201e0ba 100644
--- a/man/details_linear_reg_gls.Rd
+++ b/man/details_linear_reg_gls.Rd
@@ -180,6 +180,7 @@ lme_fit \%>\% tidy() \%>\%
\if{html}{\out{
}}\preformatted{## # A tibble: 0 × 6
## # … with 6 variables: term <chr>, estimate <dbl>, std.error <dbl>, df <dbl>,
## # statistic <dbl>, p.value <dbl>
+## # ℹ Use `colnames()` to see all variable names
}\if{html}{\out{
}}
\if{html}{\out{}}\preformatted{# gls:
diff --git a/man/details_linear_reg_h2o.Rd b/man/details_linear_reg_h2o.Rd
index 2ccadd73f..1f5588f28 100644
--- a/man/details_linear_reg_h2o.Rd
+++ b/man/details_linear_reg_h2o.Rd
@@ -51,8 +51,9 @@ wrapper around \code{\link[h2o:h2o.glm]{h2o::h2o.glm()}} with
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), lambda = 1,
-## alpha = 0.5, family = "gaussian")
+## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), lambda = 1, alpha = 0.5,
+## family = "gaussian")
}\if{html}{\out{
}}
}
diff --git a/man/details_logistic_reg_h2o.Rd b/man/details_logistic_reg_h2o.Rd
index 1c48a9a3b..d7b355601 100644
--- a/man/details_logistic_reg_h2o.Rd
+++ b/man/details_logistic_reg_h2o.Rd
@@ -50,7 +50,8 @@ binomial responses.
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), family = "binomial")
+## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), family = "binomial")
}\if{html}{\out{}}
To use a non-default argument in \code{\link[h2o:h2o.glm]{h2o::h2o.glm()}},
@@ -69,7 +70,8 @@ pass in as an engine argument to \code{set_engine()}:
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), compute_p_values = TRUE,
+## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), compute_p_values = TRUE,
## family = "binomial")
}\if{html}{\out{}}
}
diff --git a/man/details_mlp_h2o.Rd b/man/details_mlp_h2o.Rd
index 95c26d9e1..223fe18fd 100644
--- a/man/details_mlp_h2o.Rd
+++ b/man/details_mlp_h2o.Rd
@@ -82,9 +82,10 @@ input layer, which defaults to 0.
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_mlp(x = missing_arg(), y = missing_arg(), hidden = integer(1),
-## l2 = double(1), hidden_dropout_ratios = double(1), epochs = integer(1),
-## activation = character(1), rate = double(1))
+## agua::h2o_train_mlp(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), hidden = integer(1), l2 = double(1),
+## hidden_dropout_ratios = double(1), epochs = integer(1), activation = character(1),
+## rate = double(1))
}\if{html}{\out{}}
}
@@ -116,9 +117,10 @@ input layer, which defaults to 0.
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_mlp(x = missing_arg(), y = missing_arg(), hidden = integer(1),
-## l2 = double(1), hidden_dropout_ratios = double(1), epochs = integer(1),
-## activation = character(1), rate = double(1))
+## agua::h2o_train_mlp(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), hidden = integer(1), l2 = double(1),
+## hidden_dropout_ratios = double(1), epochs = integer(1), activation = character(1),
+## rate = double(1))
}\if{html}{\out{}}
}
diff --git a/man/details_multinom_reg_h2o.Rd b/man/details_multinom_reg_h2o.Rd
index b57919740..265ef2896 100644
--- a/man/details_multinom_reg_h2o.Rd
+++ b/man/details_multinom_reg_h2o.Rd
@@ -52,8 +52,9 @@ a wrapper around \code{\link[h2o:h2o.glm]{h2o::h2o.glm()}} with
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), family = missing_arg(),
-## lambda = double(1), alpha = double(1), family = "multinomial")
+## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), lambda = double(1), alpha = double(1),
+## family = "multinomial")
}\if{html}{\out{}}
}
diff --git a/man/details_naive_Bayes_h2o.Rd b/man/details_naive_Bayes_h2o.Rd
index 81b1db143..1795308f6 100644
--- a/man/details_naive_Bayes_h2o.Rd
+++ b/man/details_naive_Bayes_h2o.Rd
@@ -52,7 +52,8 @@ The \strong{agua} extension package is required to fit this model.
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_nb(x = missing_arg(), y = missing_arg(), laplace = numeric(0))
+## agua::h2o_train_nb(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), laplace = numeric(0))
}\if{html}{\out{}}
}
diff --git a/man/details_poisson_reg_h2o.Rd b/man/details_poisson_reg_h2o.Rd
index 1613d3225..253b305a3 100644
--- a/man/details_poisson_reg_h2o.Rd
+++ b/man/details_poisson_reg_h2o.Rd
@@ -56,8 +56,9 @@ poisson_reg(penalty = double(1), mixture = double(1)) \%>\%
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), lambda = double(1),
-## alpha = double(1), family = "poisson")
+## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), lambda = double(1), alpha = double(1),
+## family = "poisson")
}\if{html}{\out{}}
}
diff --git a/man/details_proportional_hazards_glmnet.Rd b/man/details_proportional_hazards_glmnet.Rd
index f4093afdd..084d2709a 100644
--- a/man/details_proportional_hazards_glmnet.Rd
+++ b/man/details_proportional_hazards_glmnet.Rd
@@ -49,7 +49,7 @@ proportional_hazards(penalty = double(1), mixture = double(1)) \%>\%
## Computational engine: glmnet
##
## Model fit template:
-## censored::glmnet_fit_wrapper(formula = missing_arg(), data = missing_arg(),
+## censored::coxnet_train(formula = missing_arg(), data = missing_arg(),
## weights = missing_arg(), alpha = double(1))
}\if{html}{\out{}}
}
diff --git a/man/details_rand_forest_h2o.Rd b/man/details_rand_forest_h2o.Rd
index 13ec892cf..0bd26fdfc 100644
--- a/man/details_rand_forest_h2o.Rd
+++ b/man/details_rand_forest_h2o.Rd
@@ -51,8 +51,9 @@ regression.
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_rf(x = missing_arg(), y = missing_arg(), mtries = integer(1),
-## ntrees = integer(1), min_rows = integer(1))
+## agua::h2o_train_rf(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), mtries = integer(1), ntrees = integer(1),
+## min_rows = integer(1))
}\if{html}{\out{}}
\code{min_rows()} and \code{min_cols()} will adjust the number of neighbors if the
@@ -81,8 +82,9 @@ chosen value if it is not consistent with the actual data dimensions.
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_rf(x = missing_arg(), y = missing_arg(), mtries = integer(1),
-## ntrees = integer(1), min_rows = integer(1))
+## agua::h2o_train_rf(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), mtries = integer(1), ntrees = integer(1),
+## min_rows = integer(1))
}\if{html}{\out{}}
}
diff --git a/man/details_rule_fit_h2o.Rd b/man/details_rule_fit_h2o.Rd
index a0a5df183..456428c59 100644
--- a/man/details_rule_fit_h2o.Rd
+++ b/man/details_rule_fit_h2o.Rd
@@ -65,7 +65,8 @@ rule_fit(
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_rule(x = missing_arg(), y = missing_arg(), rule_generation_ntrees = integer(1),
+## agua::h2o_train_rule(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), rule_generation_ntrees = integer(1),
## max_rule_length = integer(1), lambda = numeric(1))
}\if{html}{\out{}}
}
@@ -97,7 +98,8 @@ The \strong{agua} extension package is required to fit this model.
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_rule(x = missing_arg(), y = missing_arg(), rule_generation_ntrees = integer(1),
+## agua::h2o_train_rule(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), rule_generation_ntrees = integer(1),
## max_rule_length = integer(1), lambda = numeric(1))
}\if{html}{\out{}}
}
diff --git a/man/glmnet-details.Rd b/man/glmnet-details.Rd
index f45308bd0..c42bc95fe 100644
--- a/man/glmnet-details.Rd
+++ b/man/glmnet-details.Rd
@@ -219,6 +219,7 @@ all_tidy_coefs
## 9 (Intercept) 9 30.3 2.45 0.640
## 10 (Intercept) 10 31.1 2.23 0.673
## # … with 630 more rows
+## # ℹ Use `print(n = ...)` to see more rows
}\if{html}{\out{}}
\if{html}{\out{}}\preformatted{length(unique(all_tidy_coefs$lambda))
diff --git a/man/rmd/boost_tree_h2o.md b/man/rmd/boost_tree_h2o.md
index e3d67f2dd..3d184b102 100644
--- a/man/rmd/boost_tree_h2o.md
+++ b/man/rmd/boost_tree_h2o.md
@@ -62,6 +62,7 @@ boost_tree(
##
## Model fit template:
## agua::h2o_train_xgboost(x = missing_arg(), y = missing_arg(),
+## weights = missing_arg(), validation_frame = missing_arg(),
## col_sample_rate = integer(), ntrees = integer(), min_rows = integer(),
## max_depth = integer(), learn_rate = numeric(), min_split_improvement = numeric(),
## stopping_rounds = integer())
@@ -98,6 +99,7 @@ boost_tree(
##
## Model fit template:
## agua::h2o_train_xgboost(x = missing_arg(), y = missing_arg(),
+## weights = missing_arg(), validation_frame = missing_arg(),
## col_sample_rate = integer(), ntrees = integer(), min_rows = integer(),
## max_depth = integer(), learn_rate = numeric(), min_split_improvement = numeric(),
## stopping_rounds = integer())
diff --git a/man/rmd/glmnet-details.md b/man/rmd/glmnet-details.md
index 3c6750536..aebd37fee 100644
--- a/man/rmd/glmnet-details.md
+++ b/man/rmd/glmnet-details.md
@@ -200,6 +200,7 @@ all_tidy_coefs
## 9 (Intercept) 9 30.3 2.45 0.640
## 10 (Intercept) 10 31.1 2.23 0.673
## # … with 630 more rows
+## # ℹ Use `print(n = ...)` to see more rows
```
```r
diff --git a/man/rmd/linear_reg_gls.md b/man/rmd/linear_reg_gls.md
index 3290b6d02..ca32574cd 100644
--- a/man/rmd/linear_reg_gls.md
+++ b/man/rmd/linear_reg_gls.md
@@ -182,6 +182,7 @@ lme_fit %>% tidy() %>%
## # A tibble: 0 × 6
## # … with 6 variables: term <chr>, estimate <dbl>, std.error <dbl>, df <dbl>,
## # statistic <dbl>, p.value <dbl>
+## # ℹ Use `colnames()` to see all variable names
```
```r
diff --git a/man/rmd/linear_reg_h2o.md b/man/rmd/linear_reg_h2o.md
index 3e816b77a..da08030b6 100644
--- a/man/rmd/linear_reg_h2o.md
+++ b/man/rmd/linear_reg_h2o.md
@@ -40,8 +40,9 @@ linear_reg(penalty = 1, mixture = 0.5) %>%
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), lambda = 1,
-## alpha = 0.5, family = "gaussian")
+## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), lambda = 1, alpha = 0.5,
+## family = "gaussian")
```
## Preprocessing requirements
diff --git a/man/rmd/logistic_reg_h2o.md b/man/rmd/logistic_reg_h2o.md
index a78cd070b..92acf8389 100644
--- a/man/rmd/logistic_reg_h2o.md
+++ b/man/rmd/logistic_reg_h2o.md
@@ -37,7 +37,8 @@ logistic_reg() %>%
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), family = "binomial")
+## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), family = "binomial")
```
To use a non-default argument in [h2o::h2o.glm()], pass it in as an engine argument to `set_engine()`:
@@ -58,7 +59,8 @@ logistic_reg() %>%
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), compute_p_values = TRUE,
+## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), compute_p_values = TRUE,
## family = "binomial")
```
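As a side note, a minimal sketch of the kind of call that produces the template shown above (assuming parsnip and agua are attached); `compute_p_values` is an `h2o::h2o.glm()` argument and is passed through `set_engine()` unchanged:

```r
library(parsnip)
library(agua)

logistic_reg() %>%
  set_engine("h2o", compute_p_values = TRUE) %>%
  translate()
```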
diff --git a/man/rmd/mlp_h2o.md b/man/rmd/mlp_h2o.md
index 1c5a09f30..371ef157c 100644
--- a/man/rmd/mlp_h2o.md
+++ b/man/rmd/mlp_h2o.md
@@ -66,9 +66,10 @@ mlp(
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_mlp(x = missing_arg(), y = missing_arg(), hidden = integer(1),
-## l2 = double(1), hidden_dropout_ratios = double(1), epochs = integer(1),
-## activation = character(1), rate = double(1))
+## agua::h2o_train_mlp(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), hidden = integer(1), l2 = double(1),
+## hidden_dropout_ratios = double(1), epochs = integer(1), activation = character(1),
+## rate = double(1))
```
## Translation from parsnip to the original package (classification)
@@ -102,9 +103,10 @@ mlp(
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_mlp(x = missing_arg(), y = missing_arg(), hidden = integer(1),
-## l2 = double(1), hidden_dropout_ratios = double(1), epochs = integer(1),
-## activation = character(1), rate = double(1))
+## agua::h2o_train_mlp(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), hidden = integer(1), l2 = double(1),
+## hidden_dropout_ratios = double(1), epochs = integer(1), activation = character(1),
+## rate = double(1))
```
diff --git a/man/rmd/multinom_reg_h2o.md b/man/rmd/multinom_reg_h2o.md
index f610b7bb2..4b4c5e7de 100644
--- a/man/rmd/multinom_reg_h2o.md
+++ b/man/rmd/multinom_reg_h2o.md
@@ -39,8 +39,9 @@ multinom_reg(penalty = double(1), mixture = double(1)) %>%
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), family = missing_arg(),
-## lambda = double(1), alpha = double(1), family = "multinomial")
+## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), lambda = double(1), alpha = double(1),
+## family = "multinomial")
```
## Preprocessing requirements
diff --git a/man/rmd/naive_Bayes_h2o.md b/man/rmd/naive_Bayes_h2o.md
index f35426bfb..d5393c52c 100644
--- a/man/rmd/naive_Bayes_h2o.md
+++ b/man/rmd/naive_Bayes_h2o.md
@@ -45,7 +45,8 @@ naive_Bayes(Laplace = numeric(0)) %>%
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_nb(x = missing_arg(), y = missing_arg(), laplace = numeric(0))
+## agua::h2o_train_nb(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), laplace = numeric(0))
```
## Initializing h2o
diff --git a/man/rmd/poisson_reg_h2o.md b/man/rmd/poisson_reg_h2o.md
index e0dda70a9..ff6ef2c59 100644
--- a/man/rmd/poisson_reg_h2o.md
+++ b/man/rmd/poisson_reg_h2o.md
@@ -43,8 +43,9 @@ poisson_reg(penalty = double(1), mixture = double(1)) %>%
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), lambda = double(1),
-## alpha = double(1), family = "poisson")
+## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), lambda = double(1), alpha = double(1),
+## family = "poisson")
```
## Preprocessing requirements
diff --git a/man/rmd/proportional_hazards_glmnet.md b/man/rmd/proportional_hazards_glmnet.md
index cb3039369..5f06573c2 100644
--- a/man/rmd/proportional_hazards_glmnet.md
+++ b/man/rmd/proportional_hazards_glmnet.md
@@ -42,7 +42,7 @@ proportional_hazards(penalty = double(1), mixture = double(1)) %>%
## Computational engine: glmnet
##
## Model fit template:
-## censored::glmnet_fit_wrapper(formula = missing_arg(), data = missing_arg(),
+## censored::coxnet_train(formula = missing_arg(), data = missing_arg(),
## weights = missing_arg(), alpha = double(1))
```
diff --git a/man/rmd/rand_forest_h2o.md b/man/rmd/rand_forest_h2o.md
index cd2dfed76..68c9da38f 100644
--- a/man/rmd/rand_forest_h2o.md
+++ b/man/rmd/rand_forest_h2o.md
@@ -44,8 +44,9 @@ rand_forest(
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_rf(x = missing_arg(), y = missing_arg(), mtries = integer(1),
-## ntrees = integer(1), min_rows = integer(1))
+## agua::h2o_train_rf(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), mtries = integer(1), ntrees = integer(1),
+## min_rows = integer(1))
```
`min_rows()` and `min_cols()` will adjust the number of neighbors if the chosen value is not consistent with the actual data dimensions.
@@ -75,8 +76,9 @@ rand_forest(
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_rf(x = missing_arg(), y = missing_arg(), mtries = integer(1),
-## ntrees = integer(1), min_rows = integer(1))
+## agua::h2o_train_rf(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), mtries = integer(1), ntrees = integer(1),
+## min_rows = integer(1))
```
## Preprocessing requirements
diff --git a/man/rmd/rule_fit_h2o.md b/man/rmd/rule_fit_h2o.md
index 043f995be..65d31e1a9 100644
--- a/man/rmd/rule_fit_h2o.md
+++ b/man/rmd/rule_fit_h2o.md
@@ -59,7 +59,8 @@ rule_fit(
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_rule(x = missing_arg(), y = missing_arg(), rule_generation_ntrees = integer(1),
+## agua::h2o_train_rule(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), rule_generation_ntrees = integer(1),
## max_rule_length = integer(1), lambda = numeric(1))
```
@@ -94,7 +95,8 @@ rule_fit(
## Computational engine: h2o
##
## Model fit template:
-## agua::h2o_train_rule(x = missing_arg(), y = missing_arg(), rule_generation_ntrees = integer(1),
+## agua::h2o_train_rule(x = missing_arg(), y = missing_arg(), weights = missing_arg(),
+## validation_frame = missing_arg(), rule_generation_ntrees = integer(1),
## max_rule_length = integer(1), lambda = numeric(1))
```