From 9ed60b0902caa391d5b4592402e156d96ec754ee Mon Sep 17 00:00:00 2001
From: topepo
Date: Tue, 26 May 2020 17:57:56 -0400
Subject: [PATCH 1/4] updated GHA

---
 .Rbuildignore                        |  1 +
 .github/.gitignore                   |  1 +
 .github/workflows/R-CMD-check.yaml   | 52 +++++++++++++++++++++-------
 .github/workflows/pr-commands.yaml   | 51 +++++++++++++++++++++++++++
 .github/workflows/test-coverage.yaml | 46 ++++++++++++++++++++++++
 README.Rmd                           |  2 ++
 codecov.yml                          | 14 ++++++++
 7 files changed, 155 insertions(+), 12 deletions(-)
 create mode 100644 .github/.gitignore
 create mode 100644 .github/workflows/pr-commands.yaml
 create mode 100644 .github/workflows/test-coverage.yaml
 create mode 100644 codecov.yml

diff --git a/.Rbuildignore b/.Rbuildignore
index 40e1973c8..fbe868029 100644
--- a/.Rbuildignore
+++ b/.Rbuildignore
@@ -17,3 +17,4 @@ derby.log
 ^\.github$
 ^CODE_OF_CONDUCT\.md$
 ^README\.html$
+^codecov\.yml$
diff --git a/.github/.gitignore b/.github/.gitignore
new file mode 100644
index 000000000..2d19fc766
--- /dev/null
+++ b/.github/.gitignore
@@ -0,0 +1 @@
+*.html
diff --git a/.github/workflows/R-CMD-check.yaml b/.github/workflows/R-CMD-check.yaml
index cf63479cc..1432f178e 100644
--- a/.github/workflows/R-CMD-check.yaml
+++ b/.github/workflows/R-CMD-check.yaml
@@ -1,4 +1,7 @@
-on: [push, pull_request]
+on:
+  push:
+  pull_request:
+    types: [opened, synchronize, reopened]
 
 name: R-CMD-check
 
@@ -17,43 +20,68 @@ jobs:
           - { os: windows-latest, r: 'devel'}
           - { os: ubuntu-16.04, r: '3.5', cran: "https://demo.rstudiopm.com/all/__linux__/xenial/latest"}
           - { os: ubuntu-16.04, r: '3.6', cran: "https://demo.rstudiopm.com/all/__linux__/xenial/latest"}
+
     env:
       R_REMOTES_NO_ERRORS_FROM_WARNINGS: true
-      PIP_NO_WARN_SCRIPT_LOCATION: false
+      RSPM: ${{ matrix.config.rspm }}
+      GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }}
 
     steps:
-      - uses: actions/checkout@v1
+      - uses: actions/checkout@v2
 
       - uses: r-lib/actions/setup-r@master
+        with:
+          r-version: ${{ matrix.config.r }}
 
       - uses: r-lib/actions/setup-pandoc@master
 
+      - name: Query dependencies
+        run: |
+          install.packages('remotes')
+          saveRDS(remotes::dev_package_deps(dependencies = TRUE), ".github/depends.Rds", version = 2)
+          writeLines(sprintf("R-%i.%i", getRversion()$major, getRversion()$minor), ".github/R-version")
+        shell: Rscript {0}
+
       - name: Cache R packages
         if: runner.os != 'Windows'
         uses: actions/cache@v1
         with:
          path: ${{ env.R_LIBS_USER }}
-          key: ${{ runner.os }}-r-${{ matrix.config.r }}-${{ hashFiles('DESCRIPTION') }}
+          key: ${{ runner.os }}-${{ hashFiles('.github/R-version') }}-1-${{ hashFiles('.github/depends.Rds') }}
+          restore-keys: ${{ runner.os }}-${{ hashFiles('.github/R-version') }}-1-
 
       - name: Install system dependencies
         if: runner.os == 'Linux'
         env:
           RHUB_PLATFORM: linux-x86_64-ubuntu-gcc
         run: |
-          Rscript -e "install.packages('remotes')" -e "remotes::install_github('r-hub/sysreqs')"
+          Rscript -e "remotes::install_github('r-hub/sysreqs')"
           sysreqs=$(Rscript -e "cat(sysreqs::sysreq_commands('DESCRIPTION'))")
           sudo -s eval "$sysreqs"
+
       - name: Install dependencies
-        run: Rscript -e "install.packages('remotes')" -e "remotes::install_deps(dependencies = TRUE)" -e "remotes::install_cran('rcmdcheck')"
+        run: |
+          remotes::install_deps(dependencies = TRUE)
+          remotes::install_cran("rcmdcheck")
+        shell: Rscript {0}
 
-      - name: Install TensorFlow
+      - name: Session info
         run: |
-          Rscript -e "remotes::install_github('rstudio/reticulate') # TODO remove when reticulate 1.14 is on CRAN"
-          Rscript -e "reticulate::install_miniconda()"
-          Rscript -e "reticulate::conda_create('r-reticulate', packages = 'python==3.6.9')"
-          Rscript -e "tensorflow::install_tensorflow(version='1.14.0')"
+          options(width = 100)
+          pkgs <- installed.packages()[, "Package"]
+          sessioninfo::session_info(pkgs, include_base = TRUE)
+        shell: Rscript {0}
 
       - name: Check
-        run: Rscript -e "rcmdcheck::rcmdcheck(args = '--no-manual', error_on = 'warning', check_dir = 'check')"
+        env:
+          _R_CHECK_CRAN_INCOMING_: false
+        run: rcmdcheck::rcmdcheck(args = c("--no-manual", "--as-cran"), error_on = "warning", check_dir = "check")
+        shell: Rscript {0}
+
+      - name: Show testthat output
+        if: always()
+        run: find check -name 'testthat.Rout*' -exec cat '{}' \; || true
+        shell: bash
 
       - name: Upload check results
         if: failure()
diff --git a/.github/workflows/pr-commands.yaml b/.github/workflows/pr-commands.yaml
new file mode 100644
index 000000000..0d3cb7162
--- /dev/null
+++ b/.github/workflows/pr-commands.yaml
@@ -0,0 +1,51 @@
+on:
+  issue_comment:
+    types: [created]
+name: Commands
+jobs:
+  document:
+    if: startsWith(github.event.comment.body, '/document')
+    name: document
+    runs-on: macOS-latest
+    env:
+      GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }}
+    steps:
+      - uses: actions/checkout@v2
+      - uses: r-lib/actions/pr-fetch@master
+        with:
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+      - uses: r-lib/actions/setup-r@master
+      - name: Install dependencies
+        run: Rscript -e 'install.packages(c("remotes", "roxygen2"))' -e 'remotes::install_deps(dependencies = TRUE)'
+      - name: Document
+        run: Rscript -e 'roxygen2::roxygenise()'
+      - name: commit
+        run: |
+          git add man/\* NAMESPACE
+          git commit -m 'Document'
+      - uses: r-lib/actions/pr-push@master
+        with:
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+  style:
+    if: startsWith(github.event.comment.body, '/style')
+    name: style
+    runs-on: macOS-latest
+    env:
+      GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }}
+    steps:
+      - uses: actions/checkout@v2
+      - uses: r-lib/actions/pr-fetch@master
+        with:
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+      - uses: r-lib/actions/setup-r@master
+      - name: Install dependencies
+        run: Rscript -e 'install.packages("styler")'
+      - name: Style
+        run: Rscript -e 'styler::style_pkg()'
+      - name: commit
+        run: |
+          git add \*.R
+          git commit -m 'Style'
+      - uses: r-lib/actions/pr-push@master
+        with:
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/test-coverage.yaml b/.github/workflows/test-coverage.yaml
new file mode 100644
index 000000000..3058d037b
--- /dev/null
+++ b/.github/workflows/test-coverage.yaml
@@ -0,0 +1,46 @@
+on:
+  push:
+    branches:
+      - master
+  pull_request:
+    branches:
+      - master
+
+name: test-coverage
+
+jobs:
+  test-coverage:
+    runs-on: macOS-latest
+    env:
+      GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }}
+    steps:
+      - uses: actions/checkout@v2
+
+      - uses: r-lib/actions/setup-r@master
+
+      - uses: r-lib/actions/setup-pandoc@master
+
+      - name: Query dependencies
+        run: |
+          install.packages('remotes')
+          saveRDS(remotes::dev_package_deps(dependencies = TRUE), ".github/depends.Rds", version = 2)
+          writeLines(sprintf("R-%i.%i", getRversion()$major, getRversion()$minor), ".github/R-version")
+        shell: Rscript {0}
+
+      - name: Cache R packages
+        uses: actions/cache@v1
+        with:
+          path: ${{ env.R_LIBS_USER }}
+          key: ${{ runner.os }}-${{ hashFiles('.github/R-version') }}-1-${{ hashFiles('.github/depends.Rds') }}
+          restore-keys: ${{ runner.os }}-${{ hashFiles('.github/R-version') }}-1-
+
+      - name: Install dependencies
+        run: |
+          install.packages(c("remotes"))
+          remotes::install_deps(dependencies = TRUE)
+          remotes::install_cran("covr")
+        shell: Rscript {0}
+
+      - name: Test coverage
+        run: covr::codecov()
+        shell: Rscript {0}
diff --git a/README.Rmd b/README.Rmd
index 81178f700..d9f035751 100644
--- a/README.Rmd
+++ b/README.Rmd
@@ -22,6 +22,8 @@ knitr::opts_chunk$set(
 [![Downloads](http://cranlogs.r-pkg.org/badges/parsnip)](https://cran.rstudio.com/package=parsnip)
 [![lifecycle](https://img.shields.io/badge/lifecycle-maturing-blue.svg)](https://www.tidyverse.org/lifecycle/#maturing)
+[![Codecov test coverage](https://codecov.io/gh/tidymodels/parsnip/branch/master/graph/badge.svg)](https://codecov.io/gh/tidymodels/parsnip?branch=master)
+[![R build status](https://github.com/tidymodels/parsnip/workflows/R-CMD-check/badge.svg)](https://github.com/tidymodels/parsnip/actions)
 
 ## Introduction
 
diff --git a/codecov.yml b/codecov.yml
new file mode 100644
index 000000000..04c558599
--- /dev/null
+++ b/codecov.yml
@@ -0,0 +1,14 @@
+comment: false
+
+coverage:
+  status:
+    project:
+      default:
+        target: auto
+        threshold: 1%
+        informational: true
+    patch:
+      default:
+        target: auto
+        threshold: 1%
+        informational: true

From ab73263b212dd5e6385e204bff8645d6f85b740f Mon Sep 17 00:00:00 2001
From: topepo
Date: Tue, 26 May 2020 18:45:08 -0400
Subject: [PATCH 2/4] added tf installs again

---
 .github/workflows/R-CMD-check.yaml | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/.github/workflows/R-CMD-check.yaml b/.github/workflows/R-CMD-check.yaml
index 1432f178e..91be694b8 100644
--- a/.github/workflows/R-CMD-check.yaml
+++ b/.github/workflows/R-CMD-check.yaml
@@ -65,6 +65,14 @@ jobs:
           remotes::install_cran("rcmdcheck")
         shell: Rscript {0}
 
+      - name: Install TensorFlow
+        run: |
+          Rscript -e "reticulate::install_miniconda()"
+          Rscript -e "reticulate::conda_create('r-reticulate', packages = 'python==3.6.9')"
+          Rscript -e "tensorflow::install_tensorflow(version='1.14.0')"
+      - name: Check
+        run: Rscript -e "rcmdcheck::rcmdcheck(args = '--no-manual', error_on = 'warning', check_dir = 'check')"
+
       - name: Session info
         run: |
           options(width = 100)

From 32050f03a124893827803897fcbc92bdb365412b Mon Sep 17 00:00:00 2001
From: topepo
Date: Tue, 26 May 2020 19:30:41 -0400
Subject: [PATCH 3/4] tf install for code coverage

---
 .github/workflows/test-coverage.yaml | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/.github/workflows/test-coverage.yaml b/.github/workflows/test-coverage.yaml
index 3058d037b..2338abea3 100644
--- a/.github/workflows/test-coverage.yaml
+++ b/.github/workflows/test-coverage.yaml
@@ -41,6 +41,12 @@ jobs:
           remotes::install_cran("covr")
         shell: Rscript {0}
 
+      - name: Install TensorFlow
+        run: |
+          Rscript -e "reticulate::install_miniconda()"
+          Rscript -e "reticulate::conda_create('r-reticulate', packages = 'python==3.6.9')"
+          Rscript -e "tensorflow::install_tensorflow(version='1.14.0')"
+
       - name: Test coverage
         run: covr::codecov()
         shell: Rscript {0}

From e1178750f9db6d4f8bcdc2e67b8170c03d118937 Mon Sep 17 00:00:00 2001
From: topepo
Date: Tue, 26 May 2020 20:07:37 -0400
Subject: [PATCH 4/4] documentation update

---
 .github/workflows/R-CMD-check.yaml          |   1 +
 docs/404.html                               |  26 +--
 docs/CODE_OF_CONDUCT.html                   |   4 +-
 docs/articles/articles/Classification.html  |   4 +-
 docs/articles/articles/Models.html          |   4 +-
 docs/articles/articles/Regression.html      |   4 +-
 docs/articles/articles/Scratch.html         |   4 +-
 docs/articles/articles/Submodels.html       | 113 ++++-----
 docs/articles/index.html                    |   4 +-
 docs/articles/parsnip_Intro.html            |  74 +++---
 docs/authors.html                           |   4 +-
 docs/dev/articles/articles/Scratch.html     |   2 +-
 docs/dev/articles/articles/Submodels.html   |  16 +-
 docs/dev/index.html                         |  13 +-
 docs/dev/news/index.html                    |  10 +-
 docs/dev/pkgdown.css                        |   4 +-
 docs/dev/pkgdown.yml                        |   6 +-
 docs/dev/reference/boost_tree.html          |  99 ++++----
 docs/dev/reference/decision_tree.html       |  97 ++++----
 docs/dev/reference/descriptors.html         |  12 +-
 docs/dev/reference/fit.html                 |   7 +-
 docs/dev/reference/has_multi_predict.html   |  12 +-
 docs/dev/reference/linear_reg.html          | 103 ++++----
 docs/dev/reference/logistic_reg.html        | 105 ++++-----
 docs/dev/reference/mars.html                |  46 ++--
 docs/dev/reference/mlp.html                 |  88 ++++---
 docs/dev/reference/model_fit.html           |   4 +-
 docs/dev/reference/model_spec.html          |  12 +-
 docs/dev/reference/multinom_reg.html        |  84 ++++---
 docs/dev/reference/nearest_neighbor.html    |  40 ++--
 docs/dev/reference/null_model.html          |  36 ++-
 docs/dev/reference/predict.model_fit.html   |  48 ++--
 docs/dev/reference/rand_forest.html         | 120 +++++-----
 docs/dev/reference/reexports.html           |   2 +-
 docs/dev/reference/repair_call.html         | 220 ++++++++++++++++++
 docs/dev/reference/surv_reg.html            |  42 ++--
 docs/dev/reference/svm_poly.html            |  38 ++-
 docs/dev/reference/svm_rbf.html             |  78 +++----
 docs/dev/reference/tidy.nullmodel.html      |  12 +-
 .../reference/varying_args.model_spec.html  |  52 ++---
 docs/dev/sitemap.xml                        |   3 +
 docs/index.html                             | 170 +++++++-------
 docs/issue_template.html                    |   8 +-
 docs/news/index.html                        |  58 ++---
 docs/pkgdown.css                            |   4 -
 docs/pkgdown.yml                            |   9 +-
 docs/reference/C5.0_train.html              |   4 +-
 docs/reference/add_on_exports.html          |   4 +-
 docs/reference/add_rowindex.html            |   4 +-
 docs/reference/boost_tree.html              |  18 +-
 docs/reference/check_empty_ellipse.html     |   4 +-
 docs/reference/control_parsnip.html         |   4 +-
 docs/reference/convert_args.html            |   4 +-
 docs/reference/convert_stan_interval.html   |   4 +-
 docs/reference/decision_tree.html           |   4 +-
 docs/reference/descriptors.html             |  16 +-
 docs/reference/eval_args.html               |   4 +-
 docs/reference/fit.html                     |   8 +-
 docs/reference/get_model_env.html           |   4 +-
 docs/reference/has_multi_predict.html       |   4 +-
 docs/reference/index.html                   |  19 +-
 docs/reference/keras_mlp.html               |   4 +-
 docs/reference/linear_reg.html              |   4 +-
 docs/reference/logistic_reg.html            |   4 +-
 docs/reference/make_classes.html            |   4 +-
 docs/reference/mars.html                    |   4 +-
 docs/reference/mlp.html                     |   4 +-
 docs/reference/model_fit.html               |   4 +-
 docs/reference/model_printer.html           |   4 +-
 docs/reference/model_spec.html              |  16 +-
 docs/reference/multi_predict.html           |   4 +-
 docs/reference/multinom_reg.html            |   4 +-
 docs/reference/nearest_neighbor.html        |   4 +-
 docs/reference/null_model.html              |   4 +-
 docs/reference/nullmodel.html               |   4 +-
 docs/reference/other_predict.html           |   4 +-
 docs/reference/predict.model_fit.html       |   4 +-
 docs/reference/rand_forest.html             |   4 +-
 docs/reference/reexports.html               |   6 +-
 docs/reference/rpart_train.html             |   4 +-
 docs/reference/set_args.html                |   4 +-
 docs/reference/set_engine.html              |   4 +-
 docs/reference/set_new_model.html           |   4 +-
 docs/reference/show_call.html               |   4 +-
 docs/reference/surv_reg.html                |   4 +-
 docs/reference/svm_poly.html                |   4 +-
 docs/reference/svm_rbf.html                 |   4 +-
 docs/reference/tidy.model_fit.html          |   4 +-
 docs/reference/tidy.nullmodel.html          |   4 +-
 docs/reference/translate.html               |   4 +-
 docs/reference/type_sum.model_spec.html     |   4 +-
 docs/reference/varying.html                 |   4 +-
 docs/reference/varying_args.model_spec.html |   4 +-
 docs/reference/xgb_train.html               |  28 +--
 94 files changed, 1145 insertions(+), 1032 deletions(-)
 create mode 100644 docs/dev/reference/repair_call.html

diff --git a/.github/workflows/R-CMD-check.yaml b/.github/workflows/R-CMD-check.yaml
index 91be694b8..c918c583e 100644
--- a/.github/workflows/R-CMD-check.yaml
+++ b/.github/workflows/R-CMD-check.yaml
@@ -70,6 +70,7 @@ jobs:
           Rscript -e "reticulate::install_miniconda()"
           Rscript -e "reticulate::conda_create('r-reticulate', packages = 'python==3.6.9')"
           Rscript -e "tensorflow::install_tensorflow(version='1.14.0')"
+
       - name: Check
         run: Rscript -e "rcmdcheck::rcmdcheck(args = '--no-manual', error_on = 'warning', check_dir = 'check')"
 
diff --git a/docs/404.html b/docs/404.html
index 709c736a2..c64cfd877 100644
--- a/docs/404.html
+++ b/docs/404.html
@@ -9,18 +9,18 @@ Page not found (404) • parsnip
@@ -35,14 +35,14 @@
@@ -75,9 +75,9 @@
diff --git a/docs/CODE_OF_CONDUCT.html b/docs/CODE_OF_CONDUCT.html
index 69e153953..a4920a6b3 100644
--- a/docs/CODE_OF_CONDUCT.html
+++ b/docs/CODE_OF_CONDUCT.html
@@ -42,7 +42,7 @@
@@ -77,7 +77,7 @@
diff --git a/docs/articles/articles/Classification.html b/docs/articles/articles/Classification.html
index cb8804dcd..2c28b2b51 100644
--- a/docs/articles/articles/Classification.html
+++ b/docs/articles/articles/Classification.html
@@ -19,7 +19,7 @@
@@ -119,7 +120,7 @@

# Or the development version from GitHub: # install.packages("devtools") -devtools::install_github("tidymodels/parsnip") +devtools::install_github("tidymodels/parsnip")

@@ -199,7 +200,7 @@

fit(mpg ~ ., data = mtcars) #> parsnip model object #> -#> Fit time: 75ms +#> Fit time: 69ms #> Ranger result #> #> Call: @@ -213,8 +214,8 @@

#> Target node size: 5 #> Variable importance mode: impurity #> Splitrule: variance -#> OOB prediction error (MSE): 5.779248 -#> R squared (OOB): 0.8408977

+#> OOB prediction error (MSE): 5.815633 +#> R squared (OOB): 0.839896

A list of all parsnip models across different CRAN packages can be found at tidymodels.org.

Data sets previously found in parsnip are now find in the modeldata package.

diff --git a/docs/dev/news/index.html b/docs/dev/news/index.html index e798725e0..4c4e988db 100644 --- a/docs/dev/news/index.html +++ b/docs/dev/news/index.html @@ -145,15 +145,19 @@

Other Changes

    -
  • -tidyr >= 1.0.0 is now required.
  • +
  • tidyr >= 1.0.0 is now required.

  • +
  • SVM models produced by kernlab now use the formula method. This change was due to how ksvm() made indicator variables for factor predictors (with one-hot encodings). Since the ordinary formula method did not do this, the data are passed as-is to ksvm() so that the results are closer to what one would get if ksvm() were called directly.

  • +
  • MARS models produced by earth now use the formula method.

  • +
  • Under-the-hood changes were made so that non-standard data arguments in the modeling packages can be accommodated. (#315)

New Features

    -
  • A new main argument was added to boost_tree() called stop_iter for early stopping. The xgb_train() function gained arguments for early stopping and a percentage of data to leave out for a validation set.
  • +
  • A new main argument was added to boost_tree() called stop_iter for early stopping. The xgb_train() function gained arguments for early stopping and a percentage of data to leave out for a validation set.

  • +
  • If fit() is used and the underlying model uses a formula, the actual formula is passed to the model (instead of a placeholder). This makes the model call better.

  • +
  • A function named repair_call() was added. This can help change the underlying model's call object to better reflect what would have been obtained if the model function had been used directly (instead of via parsnip). This is only useful when the user chooses a formula interface and the model uses a formula interface. It will also be of limited use when a recipe is used to construct the feature set in workflows or tune.

diff --git a/docs/dev/pkgdown.css b/docs/dev/pkgdown.css index c01e5923b..1273238dd 100644 --- a/docs/dev/pkgdown.css +++ b/docs/dev/pkgdown.css @@ -244,14 +244,14 @@ nav[data-toggle='toc'] .nav .nav > .active:focus > a { .ref-index th {font-weight: normal;} -.ref-index td {vertical-align: top;} +.ref-index td {vertical-align: top; min-width: 100px} .ref-index .icon {width: 40px;} .ref-index .alias {width: 40%;} .ref-index-icons .alias {width: calc(40% - 40px);} .ref-index .title {width: 60%;} .ref-arguments th {text-align: right; padding-right: 10px;} -.ref-arguments th, .ref-arguments td {vertical-align: top;} +.ref-arguments th, .ref-arguments td {vertical-align: top; min-width: 100px} .ref-arguments .name {width: 20%;} .ref-arguments .desc {width: 80%;} diff --git a/docs/dev/pkgdown.yml b/docs/dev/pkgdown.yml index f9f24b140..e6716b06c 100644 --- a/docs/dev/pkgdown.yml +++ b/docs/dev/pkgdown.yml @@ -1,6 +1,6 @@ pandoc: 2.9.2.1 -pkgdown: 1.5.1 -pkgdown_sha: ~ +pkgdown: 1.5.1.9000 +pkgdown_sha: ac78596154e403df5f4e683f2185d88225a0fea6 articles: Classification: articles/Classification.html Models: articles/Models.html @@ -8,7 +8,7 @@ articles: Scratch: articles/Scratch.html Submodels: articles/Submodels.html parsnip_Intro: parsnip_Intro.html -last_built: 2020-05-11T17:19Z +last_built: 2020-05-27T00:06Z urls: reference: https://parsnip.tidymodels.org/reference article: https://parsnip.tidymodels.org/articles diff --git a/docs/dev/reference/boost_tree.html b/docs/dev/reference/boost_tree.html index ebb4484ab..8dd6013bf 100644 --- a/docs/dev/reference/boost_tree.html +++ b/docs/dev/reference/boost_tree.html @@ -322,66 +322,61 @@

boost_tree() %&gt;%
-  set_engine("xgboost") %&gt;%
-  set_mode("regression") %&gt;%
-  translate()

## Boosted Tree Model Specification (regression)
-## 
-## Computational engine: xgboost 
-## 
-## Model fit template:
-## parsnip::xgb_train(x = missing_arg(), y = missing_arg(), nthread = 1, 
-##     verbose = 0)
-

boost_tree() %&gt;%
-  set_engine("xgboost") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Boosted Tree Model Specification (classification)
-## 
-## Computational engine: xgboost 
-## 
-## Model fit template:
-## parsnip::xgb_train(x = missing_arg(), y = missing_arg(), nthread = 1, 
-##     verbose = 0)
-
+

boost_tree() %>%
+  set_engine("xgboost") %>%
+  set_mode("regression") %>%
+  translate()

## Boosted Tree Model Specification (regression)
+## 
+## Computational engine: xgboost 
+## 
+## Model fit template:
+## parsnip::xgb_train(x = missing_arg(), y = missing_arg(), nthread = 1, 
+##     verbose = 0)

boost_tree() %>%
+  set_engine("xgboost") %>%
+  set_mode("classification") %>%
+  translate()

## Boosted Tree Model Specification (classification)
+## 
+## Computational engine: xgboost 
+## 
+## Model fit template:
+## parsnip::xgb_train(x = missing_arg(), y = missing_arg(), nthread = 1, 
+##     verbose = 0)

C5.0

-

boost_tree() %&gt;%
-  set_engine("C5.0") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Boosted Tree Model Specification (classification)
-## 
-## Computational engine: C5.0 
-## 
-## Model fit template:
-## parsnip::C5.0_train(x = missing_arg(), y = missing_arg(), weights = missing_arg())
-
+

boost_tree() %>%
+  set_engine("C5.0") %>%
+  set_mode("classification") %>%
+  translate()

## Boosted Tree Model Specification (classification)
+## 
+## Computational engine: C5.0 
+## 
+## Model fit template:
+## parsnip::C5.0_train(x = missing_arg(), y = missing_arg(), weights = missing_arg())

Note that C50::C5.0() does not require factor predictors to be converted to indicator variables.

spark

-

boost_tree() %&gt;%
-  set_engine("spark") %&gt;%
-  set_mode("regression") %&gt;%
-  translate()

## Boosted Tree Model Specification (regression)
-## 
-## Computational engine: spark 
-## 
-## Model fit template:
-## sparklyr::ml_gradient_boosted_trees(x = missing_arg(), formula = missing_arg(), 
-##     type = "regression", seed = sample.int(10^5, 1))
-

boost_tree() %&gt;%
-  set_engine("spark") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Boosted Tree Model Specification (classification)
-## 
-## Computational engine: spark 
-## 
-## Model fit template:
-## sparklyr::ml_gradient_boosted_trees(x = missing_arg(), formula = missing_arg(), 
-##     type = "classification", seed = sample.int(10^5, 1))
-
+

boost_tree() %>%
+  set_engine("spark") %>%
+  set_mode("regression") %>%
+  translate()

## Boosted Tree Model Specification (regression)
+## 
+## Computational engine: spark 
+## 
+## Model fit template:
+## sparklyr::ml_gradient_boosted_trees(x = missing_arg(), formula = missing_arg(), 
+##     type = "regression", seed = sample.int(10^5, 1))

boost_tree() %>%
+  set_engine("spark") %>%
+  set_mode("classification") %>%
+  translate()

## Boosted Tree Model Specification (classification)
+## 
+## Computational engine: spark 
+## 
+## Model fit template:
+## sparklyr::ml_gradient_boosted_trees(x = missing_arg(), formula = missing_arg(), 
+##     type = "classification", seed = sample.int(10^5, 1))

Parameter translations

diff --git a/docs/dev/reference/decision_tree.html b/docs/dev/reference/decision_tree.html index ede787ad6..fe0e95745 100644 --- a/docs/dev/reference/decision_tree.html +++ b/docs/dev/reference/decision_tree.html @@ -267,67 +267,62 @@

decision_tree() %&gt;%
-  set_engine("rpart") %&gt;%
-  set_mode("regression") %&gt;%
-  translate()

## Decision Tree Model Specification (regression)
-## 
-## Computational engine: rpart 
-## 
-## Model fit template:
-## rpart::rpart(formula = missing_arg(), data = missing_arg(), weights = missing_arg())
-

decision_tree() %&gt;%
-  set_engine("rpart") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Decision Tree Model Specification (classification)
-## 
-## Computational engine: rpart 
-## 
-## Model fit template:
-## rpart::rpart(formula = missing_arg(), data = missing_arg(), weights = missing_arg())
-
+

decision_tree() %>%
+  set_engine("rpart") %>%
+  set_mode("regression") %>%
+  translate()

## Decision Tree Model Specification (regression)
+## 
+## Computational engine: rpart 
+## 
+## Model fit template:
+## rpart::rpart(formula = missing_arg(), data = missing_arg(), weights = missing_arg())

decision_tree() %>%
+  set_engine("rpart") %>%
+  set_mode("classification") %>%
+  translate()

## Decision Tree Model Specification (classification)
+## 
+## Computational engine: rpart 
+## 
+## Model fit template:
+## rpart::rpart(formula = missing_arg(), data = missing_arg(), weights = missing_arg())

Note that rpart::rpart() does not require factor predictors to be converted to indicator variables.

C5.0

-

decision_tree() %&gt;%
-  set_engine("C5.0") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Decision Tree Model Specification (classification)
-## 
-## Computational engine: C5.0 
-## 
-## Model fit template:
-## parsnip::C5.0_train(x = missing_arg(), y = missing_arg(), weights = missing_arg(), 
-##     trials = 1)
-
+

decision_tree() %>%
+  set_engine("C5.0") %>%
+  set_mode("classification") %>%
+  translate()

## Decision Tree Model Specification (classification)
+## 
+## Computational engine: C5.0 
+## 
+## Model fit template:
+## parsnip::C5.0_train(x = missing_arg(), y = missing_arg(), weights = missing_arg(), 
+##     trials = 1)

Note that C50::C5.0() does not require factor predictors to be converted to indicator variables.

spark

-

decision_tree() %&gt;%
-  set_engine("spark") %&gt;%
-  set_mode("regression") %&gt;%
-  translate()

## Decision Tree Model Specification (regression)
-## 
-## Computational engine: spark 
-## 
-## Model fit template:
-## sparklyr::ml_decision_tree_classifier(x = missing_arg(), formula = missing_arg(), 
-##     seed = sample.int(10^5, 1))
-

decision_tree() %&gt;%
-  set_engine("spark") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Decision Tree Model Specification (classification)
-## 
-## Computational engine: spark 
-## 
-## Model fit template:
-## sparklyr::ml_decision_tree_classifier(x = missing_arg(), formula = missing_arg(), 
-##     seed = sample.int(10^5, 1))
-
+

decision_tree() %>%
+  set_engine("spark") %>%
+  set_mode("regression") %>%
+  translate()

## Decision Tree Model Specification (regression)
+## 
+## Computational engine: spark 
+## 
+## Model fit template:
+## sparklyr::ml_decision_tree_classifier(x = missing_arg(), formula = missing_arg(), 
+##     seed = sample.int(10^5, 1))

decision_tree() %>%
+  set_engine("spark") %>%
+  set_mode("classification") %>%
+  translate()

## Decision Tree Model Specification (classification)
+## 
+## Computational engine: spark 
+## 
+## Model fit template:
+## sparklyr::ml_decision_tree_classifier(x = missing_arg(), formula = missing_arg(), 
+##     seed = sample.int(10^5, 1))

Parameter translations

diff --git a/docs/dev/reference/descriptors.html b/docs/dev/reference/descriptors.html index 6db6fa88f..faa7abdaf 100644 --- a/docs/dev/reference/descriptors.html +++ b/docs/dev/reference/descriptors.html @@ -195,9 +195,9 @@

Details .obs() = 150 .lvls() = NA (no factor outcome) .facts() = 1 (the Species predictor) - .y() = &lt;vector&gt; (Sepal.Width as a vector) - .x() = &lt;data.frame&gt; (The other 4 columns as a data frame) - .dat() = &lt;data.frame&gt; (The full data set) + .y() = <vector> (Sepal.Width as a vector) + .x() = <data.frame> (The other 4 columns as a data frame) + .dat() = <data.frame> (The full data set)

If the formula Species ~ . where used:

@@ -206,9 +206,9 @@ 

Details .obs() = 150 .lvls() = c(setosa = 50, versicolor = 50, virginica = 50) .facts() = 0 - .y() = &lt;vector&gt; (Species as a vector) - .x() = &lt;data.frame&gt; (The other 4 columns as a data frame) - .dat() = &lt;data.frame&gt; (The full data set) + .y() = <vector> (Species as a vector) + .x() = <data.frame> (The other 4 columns as a data frame) + .dat() = <data.frame> (The full data set)

To use these in a model fit, pass them to a model specification. diff --git a/docs/dev/reference/fit.html b/docs/dev/reference/fit.html index 37b87cc8f..52522745e 100644 --- a/docs/dev/reference/fit.html +++ b/docs/dev/reference/fit.html @@ -263,7 +263,8 @@

Examp #> #> Fit time: 26ms #> -#> Call: stats::glm(formula = formula, family = stats::binomial, data = data) +#> Call: stats::glm(formula = Class ~ funded_amnt + int_rate, family = stats::binomial, +#> data = data) #> #> Coefficients: #> (Intercept) funded_amnt int_rate @@ -273,9 +274,9 @@

Examp #> Null Deviance: 4055 #> Residual Deviance: 3698 AIC: 3704
using_xy
#> parsnip model object #> -#> Fit time: 15ms +#> Fit time: 23ms #> -#> Call: stats::glm(formula = formula, family = stats::binomial, data = data) +#> Call: stats::glm(formula = ..y ~ ., family = stats::binomial, data = data) #> #> Coefficients: #> (Intercept) funded_amnt int_rate diff --git a/docs/dev/reference/has_multi_predict.html b/docs/dev/reference/has_multi_predict.html index d04cccaef..d8729056c 100644 --- a/docs/dev/reference/has_multi_predict.html +++ b/docs/dev/reference/has_multi_predict.html @@ -204,13 +204,13 @@

Examp multi_predict_args(knn_fit)

#> [1] "neighbors"
multi_predict(knn_fit, mtcars[1, -1], neighbors = 1:4)$.pred
#> [[1]] -#> # A tibble: 4 x 2 +#> # A tibble: 4 x 2 #> neighbors .pred -#> <int> <dbl> -#> 1 1 21 -#> 2 2 21 -#> 3 3 20.9 -#> 4 4 21.0 +#> <int> <dbl> +#> 1 1 21 +#> 2 2 21 +#> 3 3 20.9 +#> 4 4 21.0 #>

## Linear Regression Model Specification (regression)
-## 
-## Computational engine: lm 
-## 
-## Model fit template:
-## stats::lm(formula = missing_arg(), data = missing_arg(), weights = missing_arg())
-
+

linear_reg() %>%
+  set_engine("lm") %>%
+  set_mode("regression") %>%
+  translate()

## Linear Regression Model Specification (regression)
+## 
+## Computational engine: lm 
+## 
+## Model fit template:
+## stats::lm(formula = missing_arg(), data = missing_arg(), weights = missing_arg())

glmnet

-

linear_reg() %&gt;%
-  set_engine("glmnet") %&gt;%
-  set_mode("regression") %&gt;%
-  translate()

## Linear Regression Model Specification (regression)
-## 
-## Computational engine: glmnet 
-## 
-## Model fit template:
-## glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), 
-##     family = "gaussian")
-
+

linear_reg() %>%
+  set_engine("glmnet") %>%
+  set_mode("regression") %>%
+  translate()

## Linear Regression Model Specification (regression)
+## 
+## Computational engine: glmnet 
+## 
+## Model fit template:
+## glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), 
+##     family = "gaussian")

For glmnet models, the full regularization path is always fit regardless of the value given to penalty. Also, there is the option to @@ -293,17 +291,16 @@

glmnet

results.

stan

-

linear_reg() %&gt;%
-  set_engine("stan") %&gt;%
-  set_mode("regression") %&gt;%
-  translate()

## Linear Regression Model Specification (regression)
-## 
-## Computational engine: stan 
-## 
-## Model fit template:
-## rstanarm::stan_glm(formula = missing_arg(), data = missing_arg(), 
-##     weights = missing_arg(), family = stats::gaussian, refresh = 0)
-
+

linear_reg() %>%
+  set_engine("stan") %>%
+  set_mode("regression") %>%
+  translate()

## Linear Regression Model Specification (regression)
+## 
+## Computational engine: stan 
+## 
+## Model fit template:
+## rstanarm::stan_glm(formula = missing_arg(), data = missing_arg(), 
+##     weights = missing_arg(), family = stats::gaussian, refresh = 0)

Note that the refresh default prevents logging of the estimation process. Change this value in set_engine() will show the logs.

@@ -314,31 +311,29 @@

stan

predictive distribution as appropriate) is returned.

spark

-

linear_reg() %&gt;%
-  set_engine("spark") %&gt;%
-  set_mode("regression") %&gt;%
-  translate()

## Linear Regression Model Specification (regression)
-## 
-## Computational engine: spark 
-## 
-## Model fit template:
-## sparklyr::ml_linear_regression(x = missing_arg(), formula = missing_arg(), 
-##     weight_col = missing_arg())
-
+

linear_reg() %>%
+  set_engine("spark") %>%
+  set_mode("regression") %>%
+  translate()

## Linear Regression Model Specification (regression)
+## 
+## Computational engine: spark 
+## 
+## Model fit template:
+## sparklyr::ml_linear_regression(x = missing_arg(), formula = missing_arg(), 
+##     weight_col = missing_arg())

keras

-

linear_reg() %&gt;%
-  set_engine("keras") %&gt;%
-  set_mode("regression") %&gt;%
-  translate()

## Linear Regression Model Specification (regression)
-## 
-## Computational engine: keras 
-## 
-## Model fit template:
-## parsnip::keras_mlp(x = missing_arg(), y = missing_arg(), hidden_units = 1, 
-##     act = "linear")
-
+

linear_reg() %>%
+  set_engine("keras") %>%
+  set_mode("regression") %>%
+  translate()

## Linear Regression Model Specification (regression)
+## 
+## Computational engine: keras 
+## 
+## Model fit template:
+## parsnip::keras_mlp(x = missing_arg(), y = missing_arg(), hidden_units = 1, 
+##     act = "linear")

Parameter translations

diff --git a/docs/dev/reference/logistic_reg.html b/docs/dev/reference/logistic_reg.html index a1e10c327..4bb0a12d7 100644 --- a/docs/dev/reference/logistic_reg.html +++ b/docs/dev/reference/logistic_reg.html @@ -255,31 +255,29 @@

logistic_reg() %&gt;%
-  set_engine("glm") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Logistic Regression Model Specification (classification)
-## 
-## Computational engine: glm 
-## 
-## Model fit template:
-## stats::glm(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), 
-##     family = stats::binomial)
-
+

logistic_reg() %>%
+  set_engine("glm") %>%
+  set_mode("classification") %>%
+  translate()

## Logistic Regression Model Specification (classification)
+## 
+## Computational engine: glm 
+## 
+## Model fit template:
+## stats::glm(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), 
+##     family = stats::binomial)

glmnet

-

logistic_reg() %&gt;%
-  set_engine("glmnet") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Logistic Regression Model Specification (classification)
-## 
-## Computational engine: glmnet 
-## 
-## Model fit template:
-## glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), 
-##     family = "binomial")
-
+

logistic_reg() %>%
+  set_engine("glmnet") %>%
+  set_mode("classification") %>%
+  translate()

## Logistic Regression Model Specification (classification)
+## 
+## Computational engine: glmnet 
+## 
+## Model fit template:
+## glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), 
+##     family = "binomial")

For glmnet models, the full regularization path is always fit regardless of the value given to penalty. Also, there is the option to @@ -292,17 +290,16 @@

glmnet

results.

stan

-

logistic_reg() %&gt;%
-  set_engine("stan") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Logistic Regression Model Specification (classification)
-## 
-## Computational engine: stan 
-## 
-## Model fit template:
-## rstanarm::stan_glm(formula = missing_arg(), data = missing_arg(), 
-##     weights = missing_arg(), family = stats::binomial, refresh = 0)
-
+

logistic_reg() %>%
+  set_engine("stan") %>%
+  set_mode("classification") %>%
+  translate()

## Logistic Regression Model Specification (classification)
+## 
+## Computational engine: stan 
+## 
+## Model fit template:
+## rstanarm::stan_glm(formula = missing_arg(), data = missing_arg(), 
+##     weights = missing_arg(), family = stats::binomial, refresh = 0)

Note that the refresh default prevents logging of the estimation process. Change this value in set_engine() will show the logs.

@@ -313,31 +310,29 @@

stan

predictive distribution as appropriate) is returned.

spark

-

logistic_reg() %&gt;%
-  set_engine("spark") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Logistic Regression Model Specification (classification)
-## 
-## Computational engine: spark 
-## 
-## Model fit template:
-## sparklyr::ml_logistic_regression(x = missing_arg(), formula = missing_arg(), 
-##     weight_col = missing_arg(), family = "binomial")
-
+

logistic_reg() %>%
+  set_engine("spark") %>%
+  set_mode("classification") %>%
+  translate()

## Logistic Regression Model Specification (classification)
+## 
+## Computational engine: spark 
+## 
+## Model fit template:
+## sparklyr::ml_logistic_regression(x = missing_arg(), formula = missing_arg(), 
+##     weight_col = missing_arg(), family = "binomial")

keras

-

logistic_reg() %&gt;%
-  set_engine("keras") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Logistic Regression Model Specification (classification)
-## 
-## Computational engine: keras 
-## 
-## Model fit template:
-## parsnip::keras_mlp(x = missing_arg(), y = missing_arg(), hidden_units = 1, 
-##     act = "linear")
-
+

logistic_reg() %>%
+  set_engine("keras") %>%
+  set_mode("classification") %>%
+  translate()

## Logistic Regression Model Specification (classification)
+## 
+## Computational engine: keras 
+## 
+## Model fit template:
+## parsnip::keras_mlp(x = missing_arg(), y = missing_arg(), hidden_units = 1, 
+##     act = "linear")

Parameter translations

diff --git a/docs/dev/reference/mars.html b/docs/dev/reference/mars.html index df178c155..daa4827c9 100644 --- a/docs/dev/reference/mars.html +++ b/docs/dev/reference/mars.html @@ -248,30 +248,28 @@

mars() %&gt;%
-  set_engine("earth") %&gt;%
-  set_mode("regression") %&gt;%
-  translate()

## MARS Model Specification (regression)
-## 
-## Computational engine: earth 
-## 
-## Model fit template:
-## earth::earth(x = missing_arg(), y = missing_arg(), weights = missing_arg(), 
-##     keepxy = TRUE)
-

mars() %&gt;%
-  set_engine("earth") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## MARS Model Specification (classification)
-## 
-## Engine-Specific Arguments:
-##   glm = list(family = stats::binomial)
-## 
-## Computational engine: earth 
-## 
-## Model fit template:
-## earth::earth(x = missing_arg(), y = missing_arg(), weights = missing_arg(), 
-##     glm = list(family = stats::binomial), keepxy = TRUE)
-
+

mars() %>%
+  set_engine("earth") %>%
+  set_mode("regression") %>%
+  translate()

## MARS Model Specification (regression)
+## 
+## Computational engine: earth 
+## 
+## Model fit template:
+## earth::earth(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), 
+##     keepxy = TRUE)

mars() %>%
+  set_engine("earth") %>%
+  set_mode("classification") %>%
+  translate()

## MARS Model Specification (classification)
+## 
+## Engine-Specific Arguments:
+##   glm = list(family = stats::binomial)
+## 
+## Computational engine: earth 
+## 
+## Model fit template:
+## earth::earth(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), 
+##     glm = list(family = stats::binomial), keepxy = TRUE)

Note that, when the model is fit, the earth package only has its namespace loaded. However, if multi_predict is used, the package is diff --git a/docs/dev/reference/mlp.html b/docs/dev/reference/mlp.html index 8d4bdcee1..a84fd2ae5 100644 --- a/docs/dev/reference/mlp.html +++ b/docs/dev/reference/mlp.html @@ -276,57 +276,53 @@

mlp() %&gt;%
-  set_engine("keras") %&gt;%
-  set_mode("regression") %&gt;%
-  translate()

## Single Layer Neural Network Specification (regression)
-## 
-## Computational engine: keras 
-## 
-## Model fit template:
-## parsnip::keras_mlp(x = missing_arg(), y = missing_arg())
-

mlp() %&gt;%
-  set_engine("keras") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Single Layer Neural Network Specification (classification)
-## 
-## Computational engine: keras 
-## 
-## Model fit template:
-## parsnip::keras_mlp(x = missing_arg(), y = missing_arg())
-
+

mlp() %>%
+  set_engine("keras") %>%
+  set_mode("regression") %>%
+  translate()

## Single Layer Neural Network Specification (regression)
+## 
+## Computational engine: keras 
+## 
+## Model fit template:
+## parsnip::keras_mlp(x = missing_arg(), y = missing_arg())

mlp() %>%
+  set_engine("keras") %>%
+  set_mode("classification") %>%
+  translate()

## Single Layer Neural Network Specification (classification)
+## 
+## Computational engine: keras 
+## 
+## Model fit template:
+## parsnip::keras_mlp(x = missing_arg(), y = missing_arg())

An error is thrown if both penalty and dropout are specified for keras models.

nnet

-

mlp() %&gt;%
-  set_engine("nnet") %&gt;%
-  set_mode("regression") %&gt;%
-  translate()

## Single Layer Neural Network Specification (regression)
-## 
-## Main Arguments:
-##   hidden_units = 5
-## 
-## Computational engine: nnet 
-## 
-## Model fit template:
-## nnet::nnet(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), 
-##     size = 5, trace = FALSE, linout = TRUE)
-

mlp() %&gt;%
-  set_engine("nnet") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Single Layer Neural Network Specification (classification)
-## 
-## Main Arguments:
-##   hidden_units = 5
-## 
-## Computational engine: nnet 
-## 
-## Model fit template:
-## nnet::nnet(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), 
-##     size = 5, trace = FALSE, linout = FALSE)
-
+

mlp() %>%
+  set_engine("nnet") %>%
+  set_mode("regression") %>%
+  translate()

## Single Layer Neural Network Specification (regression)
+## 
+## Main Arguments:
+##   hidden_units = 5
+## 
+## Computational engine: nnet 
+## 
+## Model fit template:
+## nnet::nnet(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), 
+##     size = 5, trace = FALSE, linout = TRUE)

mlp() %>%
+  set_engine("nnet") %>%
+  set_mode("classification") %>%
+  translate()

## Single Layer Neural Network Specification (classification)
+## 
+## Main Arguments:
+##   hidden_units = 5
+## 
+## Computational engine: nnet 
+## 
+## Model fit template:
+## nnet::nnet(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), 
+##     size = 5, trace = FALSE, linout = FALSE)

Parameter translations

diff --git a/docs/dev/reference/model_fit.html b/docs/dev/reference/model_fit.html index d438aa8c2..fc4198ce9 100644 --- a/docs/dev/reference/model_fit.html +++ b/docs/dev/reference/model_fit.html @@ -189,10 +189,10 @@

Examp fit_obj <- fit(spec_obj, mpg ~ ., data = mtcars) fit_obj
#> parsnip model object #> -#> Fit time: 1ms +#> Fit time: 2ms #> #> Call: -#> stats::lm(formula = formula, data = data, x = ~ifelse(.obs() < +#> stats::lm(formula = mpg ~ ., data = data, x = ~ifelse(.obs() < #> 500, TRUE, FALSE)) #> #> Coefficients: diff --git a/docs/dev/reference/model_spec.html b/docs/dev/reference/model_spec.html index 310fa17a2..53794323f 100644 --- a/docs/dev/reference/model_spec.html +++ b/docs/dev/reference/model_spec.html @@ -200,7 +200,7 @@

multinom_reg() %&gt;%
-  set_engine("glmnet") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Multinomial Regression Model Specification (classification)
-## 
-## Computational engine: glmnet 
-## 
-## Model fit template:
-## glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), 
-##     family = "multinomial")
-
+

multinom_reg() %>%
+  set_engine("glmnet") %>%
+  set_mode("classification") %>%
+  translate()

## Multinomial Regression Model Specification (classification)
+## 
+## Computational engine: glmnet 
+## 
+## Model fit template:
+## glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), 
+##     family = "multinomial")

For glmnet models, the full regularization path is always fit regardless of the value given to penalty. Also, there is the option to @@ -277,45 +276,42 @@

multinom_reg() %&gt;%
-  set_engine("nnet") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Multinomial Regression Model Specification (classification)
-## 
-## Computational engine: nnet 
-## 
-## Model fit template:
-## nnet::multinom(formula = missing_arg(), data = missing_arg(), 
-##     weights = missing_arg(), trace = FALSE)
-
+

multinom_reg() %>%
+  set_engine("nnet") %>%
+  set_mode("classification") %>%
+  translate()

## Multinomial Regression Model Specification (classification)
+## 
+## Computational engine: nnet 
+## 
+## Model fit template:
+## nnet::multinom(formula = missing_arg(), data = missing_arg(), 
+##     weights = missing_arg(), trace = FALSE)

spark

-

multinom_reg() %&gt;%
-  set_engine("spark") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Multinomial Regression Model Specification (classification)
-## 
-## Computational engine: spark 
-## 
-## Model fit template:
-## sparklyr::ml_logistic_regression(x = missing_arg(), formula = missing_arg(), 
-##     weight_col = missing_arg(), family = "multinomial")
-
+

multinom_reg() %>%
+  set_engine("spark") %>%
+  set_mode("classification") %>%
+  translate()

## Multinomial Regression Model Specification (classification)
+## 
+## Computational engine: spark 
+## 
+## Model fit template:
+## sparklyr::ml_logistic_regression(x = missing_arg(), formula = missing_arg(), 
+##     weight_col = missing_arg(), family = "multinomial")

keras

-

multinom_reg() %&gt;%
-  set_engine("keras") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Multinomial Regression Model Specification (classification)
-## 
-## Computational engine: keras 
-## 
-## Model fit template:
-## parsnip::keras_mlp(x = missing_arg(), y = missing_arg(), hidden_units = 1, 
-##     act = "linear")
-
+

multinom_reg() %>%
+  set_engine("keras") %>%
+  set_mode("classification") %>%
+  translate()

## Multinomial Regression Model Specification (classification)
+## 
+## Computational engine: keras 
+## 
+## Model fit template:
+## parsnip::keras_mlp(x = missing_arg(), y = missing_arg(), hidden_units = 1, 
+##     act = "linear")

Parameter translations

diff --git a/docs/dev/reference/nearest_neighbor.html b/docs/dev/reference/nearest_neighbor.html index 4b5c5a7fa..73e82ad99 100644 --- a/docs/dev/reference/nearest_neighbor.html +++ b/docs/dev/reference/nearest_neighbor.html @@ -222,27 +222,25 @@

nearest_neighbor() %&gt;%
-  set_engine("kknn") %&gt;%
-  set_mode("regression") %&gt;%
-  translate()

## K-Nearest Neighbor Model Specification (regression)
-## 
-## Computational engine: kknn 
-## 
-## Model fit template:
-## kknn::train.kknn(formula = missing_arg(), data = missing_arg(), 
-##     ks = 5)
-

nearest_neighbor() %&gt;%
-  set_engine("kknn") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## K-Nearest Neighbor Model Specification (classification)
-## 
-## Computational engine: kknn 
-## 
-## Model fit template:
-## kknn::train.kknn(formula = missing_arg(), data = missing_arg(), 
-##     ks = 5)
-
+

nearest_neighbor() %>%
+  set_engine("kknn") %>%
+  set_mode("regression") %>%
+  translate()

## K-Nearest Neighbor Model Specification (regression)
+## 
+## Computational engine: kknn 
+## 
+## Model fit template:
+## kknn::train.kknn(formula = missing_arg(), data = missing_arg(), 
+##     ks = 5)

nearest_neighbor() %>%
+  set_engine("kknn") %>%
+  set_mode("classification") %>%
+  translate()

## K-Nearest Neighbor Model Specification (classification)
+## 
+## Computational engine: kknn 
+## 
+## Model fit template:
+## kknn::train.kknn(formula = missing_arg(), data = missing_arg(), 
+##     ks = 5)

For kknn, the underlying modeling function used is a restricted version of train.kknn() and not kknn(). It is set up in this way so diff --git a/docs/dev/reference/null_model.html b/docs/dev/reference/null_model.html index c299fd299..f5890685b 100644 --- a/docs/dev/reference/null_model.html +++ b/docs/dev/reference/null_model.html @@ -171,25 +171,23 @@

null_model() %&gt;%
-  set_engine("parsnip") %&gt;%
-  set_mode("regression") %&gt;%
-  translate()

## Model Specification (regression)
-## 
-## Computational engine: parsnip 
-## 
-## Model fit template:
-## nullmodel(x = missing_arg(), y = missing_arg())
-

null_model() %&gt;%
-  set_engine("parsnip") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Model Specification (classification)
-## 
-## Computational engine: parsnip 
-## 
-## Model fit template:
-## nullmodel(x = missing_arg(), y = missing_arg())
-
+

null_model() %>%
+  set_engine("parsnip") %>%
+  set_mode("regression") %>%
+  translate()

## Model Specification (regression)
+## 
+## Computational engine: parsnip 
+## 
+## Model fit template:
+## nullmodel(x = missing_arg(), y = missing_arg())

null_model() %>%
+  set_engine("parsnip") %>%
+  set_mode("classification") %>%
+  translate()

## Model Specification (classification)
+## 
+## Computational engine: parsnip 
+## 
+## Model fit template:
+## nullmodel(x = missing_arg(), y = missing_arg())

See also

diff --git a/docs/dev/reference/predict.model_fit.html b/docs/dev/reference/predict.model_fit.html index 8250b21d9..21f71b4f6 100644 --- a/docs/dev/reference/predict.model_fit.html +++ b/docs/dev/reference/predict.model_fit.html @@ -253,37 +253,37 @@

Examp slice(1:10) %>% select(-mpg) -predict(lm_model, pred_cars)
#> # A tibble: 10 x 1 +predict(lm_model, pred_cars)
#> # A tibble: 10 x 1 #> .pred -#> <dbl> -#> 1 23.4 -#> 2 23.3 -#> 3 27.6 -#> 4 21.5 -#> 5 17.6 -#> 6 21.6 -#> 7 13.9 -#> 8 21.7 -#> 9 25.6 -#> 10 17.1
+#> <dbl> +#> 1 23.4 +#> 2 23.3 +#> 3 27.6 +#> 4 21.5 +#> 5 17.6 +#> 6 21.6 +#> 7 13.9 +#> 8 21.7 +#> 9 25.6 +#> 10 17.1
predict( lm_model, pred_cars, type = "conf_int", level = 0.90 -)
#> # A tibble: 10 x 2 +)
#> # A tibble: 10 x 2 #> .pred_lower .pred_upper -#> <dbl> <dbl> -#> 1 17.9 29.0 -#> 2 18.1 28.5 -#> 3 24.0 31.3 -#> 4 17.5 25.6 -#> 5 14.3 20.8 -#> 6 17.0 26.2 -#> 7 9.65 18.2 -#> 8 16.2 27.2 -#> 9 14.2 37.0 -#> 10 11.5 22.7
+#> <dbl> <dbl> +#> 1 17.9 29.0 +#> 2 18.1 28.5 +#> 3 24.0 31.3 +#> 4 17.5 25.6 +#> 5 14.3 20.8 +#> 6 17.0 26.2 +#> 7 9.65 18.2 +#> 8 16.2 27.2 +#> 9 14.2 37.0 +#> 10 11.5 22.7
predict( lm_model, pred_cars, diff --git a/docs/dev/reference/rand_forest.html b/docs/dev/reference/rand_forest.html index c045444a6..e9fbe86c2 100644 --- a/docs/dev/reference/rand_forest.html +++ b/docs/dev/reference/rand_forest.html @@ -256,29 +256,27 @@

rand_forest() %&gt;%
-  set_engine("ranger") %&gt;%
-  set_mode("regression") %&gt;%
-  translate()

## Random Forest Model Specification (regression)
-## 
-## Computational engine: ranger 
-## 
-## Model fit template:
-## ranger::ranger(formula = missing_arg(), data = missing_arg(), 
-##     case.weights = missing_arg(), num.threads = 1, verbose = FALSE, 
-##     seed = sample.int(10^5, 1))
-

rand_forest() %&gt;%
-  set_engine("ranger") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Random Forest Model Specification (classification)
-## 
-## Computational engine: ranger 
-## 
-## Model fit template:
-## ranger::ranger(formula = missing_arg(), data = missing_arg(), 
-##     case.weights = missing_arg(), num.threads = 1, verbose = FALSE, 
-##     seed = sample.int(10^5, 1), probability = TRUE)
-
+

rand_forest() %>%
+  set_engine("ranger") %>%
+  set_mode("regression") %>%
+  translate()

## Random Forest Model Specification (regression)
+## 
+## Computational engine: ranger 
+## 
+## Model fit template:
+## ranger::ranger(formula = missing_arg(), data = missing_arg(), 
+##     case.weights = missing_arg(), num.threads = 1, verbose = FALSE, 
+##     seed = sample.int(10^5, 1))

rand_forest() %>%
+  set_engine("ranger") %>%
+  set_mode("classification") %>%
+  translate()

## Random Forest Model Specification (classification)
+## 
+## Computational engine: ranger 
+## 
+## Model fit template:
+## ranger::ranger(formula = missing_arg(), data = missing_arg(), 
+##     case.weights = missing_arg(), num.threads = 1, verbose = FALSE, 
+##     seed = sample.int(10^5, 1), probability = TRUE)

Note that ranger::ranger() does not require factor predictors to be converted to indicator variables.

@@ -288,52 +286,48 @@

rand_forest() %&gt;%
-  set_engine("randomForest") %&gt;%
-  set_mode("regression") %&gt;%
-  translate()

## Random Forest Model Specification (regression)
-## 
-## Computational engine: randomForest 
-## 
-## Model fit template:
-## randomForest::randomForest(x = missing_arg(), y = missing_arg())
-

rand_forest() %&gt;%
-  set_engine("randomForest") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Random Forest Model Specification (classification)
-## 
-## Computational engine: randomForest 
-## 
-## Model fit template:
-## randomForest::randomForest(x = missing_arg(), y = missing_arg())
-
+

rand_forest() %>%
+  set_engine("randomForest") %>%
+  set_mode("regression") %>%
+  translate()

## Random Forest Model Specification (regression)
+## 
+## Computational engine: randomForest 
+## 
+## Model fit template:
+## randomForest::randomForest(x = missing_arg(), y = missing_arg())

rand_forest() %>%
+  set_engine("randomForest") %>%
+  set_mode("classification") %>%
+  translate()

## Random Forest Model Specification (classification)
+## 
+## Computational engine: randomForest 
+## 
+## Model fit template:
+## randomForest::randomForest(x = missing_arg(), y = missing_arg())

Note that randomForest::randomForest() does not require factor predictors to be converted to indicator variables.

spark

-

rand_forest() %&gt;%
-  set_engine("spark") %&gt;%
-  set_mode("regression") %&gt;%
-  translate()

## Random Forest Model Specification (regression)
-## 
-## Computational engine: spark 
-## 
-## Model fit template:
-## sparklyr::ml_random_forest(x = missing_arg(), formula = missing_arg(), 
-##     type = "regression", seed = sample.int(10^5, 1))
-

rand_forest() %&gt;%
-  set_engine("spark") %&gt;%
-  set_mode("classification") %&gt;%
-  translate()

## Random Forest Model Specification (classification)
-## 
-## Computational engine: spark 
-## 
-## Model fit template:
-## sparklyr::ml_random_forest(x = missing_arg(), formula = missing_arg(), 
-##     type = "classification", seed = sample.int(10^5, 1))
-
+

rand_forest() %>%
+  set_engine("spark") %>%
+  set_mode("regression") %>%
+  translate()

## Random Forest Model Specification (regression)
+## 
+## Computational engine: spark 
+## 
+## Model fit template:
+## sparklyr::ml_random_forest(x = missing_arg(), formula = missing_arg(), 
+##     type = "regression", seed = sample.int(10^5, 1))

rand_forest() %>%
+  set_engine("spark") %>%
+  set_mode("classification") %>%
+  translate()

## Random Forest Model Specification (classification)
+## 
+## Computational engine: spark 
+## 
+## Model fit template:
+## sparklyr::ml_random_forest(x = missing_arg(), formula = missing_arg(), 
+##     type = "classification", seed = sample.int(10^5, 1))

Parameter translations

diff --git a/docs/dev/reference/reexports.html b/docs/dev/reference/reexports.html index 1d4231dc8..823e3eba5 100644 --- a/docs/dev/reference/reexports.html +++ b/docs/dev/reference/reexports.html @@ -149,7 +149,7 @@

Objects exported from other packages

These objects are imported from other packages. Follow the links below to see their documentation.

- +
generics

fit, fit_xy, tidy, varying_args

magrittr

%>%

diff --git a/docs/dev/reference/repair_call.html b/docs/dev/reference/repair_call.html new file mode 100644 index 000000000..bb6f503dc --- /dev/null +++ b/docs/dev/reference/repair_call.html @@ -0,0 +1,220 @@ + + + + + + + + +Repair a model call object — repair_call • parsnip + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + + + +
+ +
+
+ + +
+

When the user passes a formula to fit() and the underlying model function uses a formula, the call object produced by fit() may not be usable by other functions. For example, some arguments may still be quosures and the data portion of the call will not correspond to the original data.

+
+ +
repair_call(x, data)
+ +

Arguments

+ + + + + + + + + + +
x

A fitted parsnip model. An error will occur if the underlying model does not have a call element.

data

A data object that is relevant to the call. In most cases, this is the data frame that was given to parsnip for the model fit (i.e., the training set data). The name of this data object is inserted into the call.

+ +

Value

+ +

A modified parsnip fitted model.

+

Details

+ +

repair_call() can adjust the model object's call so that it is usable by other functions and methods.

+ +

Examples

+
+fitted_model <-
+  linear_reg() %>%
+  set_engine("lm", model = TRUE) %>%
+  fit(mpg ~ ., data = mtcars)
+
+# In this call, note that `data` is not `mtcars` and the `model = ~TRUE`
+# indicates that the `model` argument is an `rlang` quosure.
+fitted_model$fit$call
#> stats::lm(formula = mpg ~ ., data = data, model = ~TRUE)
+# All better:
+repair_call(fitted_model, mtcars)$fit$call
#> stats::lm(formula = mpg ~ ., data = mtcars, model = TRUE)
diff --git a/docs/dev/reference/surv_reg.html b/docs/dev/reference/surv_reg.html
index 3612ec471..332f4aedc 100644
--- a/docs/dev/reference/surv_reg.html
+++ b/docs/dev/reference/surv_reg.html
@@ -226,31 +226,29 @@

surv_reg() %>%
-  set_engine("flexsurv") %>%
-  set_mode("regression") %>%
-  translate()

## Parametric Survival Regression Model Specification (regression)
-## 
-## Computational engine: flexsurv 
-## 
-## Model fit template:
-## flexsurv::flexsurvreg(formula = missing_arg(), data = missing_arg(), 
-##     weights = missing_arg())
-
+

surv_reg() %>%
+  set_engine("flexsurv") %>%
+  set_mode("regression") %>%
+  translate()

## Parametric Survival Regression Model Specification (regression)
+## 
+## Computational engine: flexsurv 
+## 
+## Model fit template:
+## flexsurv::flexsurvreg(formula = missing_arg(), data = missing_arg(), 
+##     weights = missing_arg())

survival

-

surv_reg() %>%
-  set_engine("survival") %>%
-  set_mode("regression") %>%
-  translate()

## Parametric Survival Regression Model Specification (regression)
-## 
-## Computational engine: survival 
-## 
-## Model fit template:
-## survival::survreg(formula = missing_arg(), data = missing_arg(), 
-##     weights = missing_arg(), model = TRUE)
-
+

surv_reg() %>%
+  set_engine("survival") %>%
+  set_mode("regression") %>%
+  translate()

## Parametric Survival Regression Model Specification (regression)
+## 
+## Computational engine: survival 
+## 
+## Model fit template:
+## survival::survreg(formula = missing_arg(), data = missing_arg(), 
+##     weights = missing_arg(), model = TRUE)

Note that model = TRUE is needed to produce quantile predictions when there is a stratification variable and can be overridden in other cases.
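
A hedged sketch of that override (output omitted): the engine argument can be supplied directly in set_engine().

# Drop the stored model frame when quantile predictions are not needed
surv_reg() %>%
  set_engine("survival", model = FALSE) %>%
  set_mode("regression") %>%
  translate()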

diff --git a/docs/dev/reference/svm_poly.html b/docs/dev/reference/svm_poly.html
index 161acd1a3..6d8d510eb 100644
--- a/docs/dev/reference/svm_poly.html
+++ b/docs/dev/reference/svm_poly.html
@@ -253,26 +253,24 @@

svm_poly() %>%
-  set_engine("kernlab") %>%
-  set_mode("regression") %>%
-  translate()

## Polynomial Support Vector Machine Specification (regression)
-## 
-## Computational engine: kernlab 
-## 
-## Model fit template:
-## kernlab::ksvm(x = missing_arg(), y = missing_arg(), kernel = "polydot")
-

svm_poly() %>%
-  set_engine("kernlab") %>%
-  set_mode("classification") %>%
-  translate()

## Polynomial Support Vector Machine Specification (classification)
-## 
-## Computational engine: kernlab 
-## 
-## Model fit template:
-## kernlab::ksvm(x = missing_arg(), y = missing_arg(), kernel = "polydot", 
-##     prob.model = TRUE)
-
+

svm_poly() %>%
+  set_engine("kernlab") %>%
+  set_mode("regression") %>%
+  translate()

## Polynomial Support Vector Machine Specification (regression)
+## 
+## Computational engine: kernlab 
+## 
+## Model fit template:
+## kernlab::ksvm(x = missing_arg(), data = missing_arg(), kernel = "polydot")

svm_poly() %>%
+  set_engine("kernlab") %>%
+  set_mode("classification") %>%
+  translate()

## Polynomial Support Vector Machine Specification (classification)
+## 
+## Computational engine: kernlab 
+## 
+## Model fit template:
+## kernlab::ksvm(x = missing_arg(), data = missing_arg(), kernel = "polydot", 
+##     prob.model = TRUE)

Parameter translations
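The table is again elided in this diff; a short sketch of the same idea for the kernlab engine, assuming the usual mapping of cost to ksvm()'s C argument (output omitted):

# cost and degree are translated to ksvm() arguments by translate()
svm_poly(cost = 2, degree = 2) %>%
  set_engine("kernlab") %>%
  set_mode("classification") %>%
  translate()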

diff --git a/docs/dev/reference/svm_rbf.html b/docs/dev/reference/svm_rbf.html
index a2834a923..f0690ecb4 100644
--- a/docs/dev/reference/svm_rbf.html
+++ b/docs/dev/reference/svm_rbf.html
@@ -243,50 +243,46 @@

svm_rbf() %>%
-  set_engine("kernlab") %>%
-  set_mode("regression") %>%
-  translate()

## Radial Basis Function Support Vector Machine Specification (regression)
-## 
-## Computational engine: kernlab 
-## 
-## Model fit template:
-## kernlab::ksvm(x = missing_arg(), y = missing_arg(), kernel = "rbfdot")
-

svm_rbf() %>%
-  set_engine("kernlab") %>%
-  set_mode("classification") %>%
-  translate()

## Radial Basis Function Support Vector Machine Specification (classification)
-## 
-## Computational engine: kernlab 
-## 
-## Model fit template:
-## kernlab::ksvm(x = missing_arg(), y = missing_arg(), kernel = "rbfdot", 
-##     prob.model = TRUE)
-
+

svm_rbf() %>%
+  set_engine("kernlab") %>%
+  set_mode("regression") %>%
+  translate()

## Radial Basis Function Support Vector Machine Specification (regression)
+## 
+## Computational engine: kernlab 
+## 
+## Model fit template:
+## kernlab::ksvm(x = missing_arg(), data = missing_arg(), kernel = "rbfdot")

svm_rbf() %>%
+  set_engine("kernlab") %>%
+  set_mode("classification") %>%
+  translate()

## Radial Basis Function Support Vector Machine Specification (classification)
+## 
+## Computational engine: kernlab 
+## 
+## Model fit template:
+## kernlab::ksvm(x = missing_arg(), data = missing_arg(), kernel = "rbfdot", 
+##     prob.model = TRUE)

liquidSVM

-

svm_rbf() %>%
-  set_engine("liquidSVM") %>%
-  set_mode("regression") %>%
-  translate()

## Radial Basis Function Support Vector Machine Specification (regression)
-## 
-## Computational engine: liquidSVM 
-## 
-## Model fit template:
-## liquidSVM::svm(x = missing_arg(), y = missing_arg(), folds = 1, 
-##     threads = 0)
-

svm_rbf() %>%
-  set_engine("liquidSVM") %>%
-  set_mode("classification") %>%
-  translate()

## Radial Basis Function Support Vector Machine Specification (classification)
-## 
-## Computational engine: liquidSVM 
-## 
-## Model fit template:
-## liquidSVM::svm(x = missing_arg(), y = missing_arg(), folds = 1, 
-##     threads = 0)
-
+

svm_rbf() %>%
+  set_engine("liquidSVM") %>%
+  set_mode("regression") %>%
+  translate()

## Radial Basis Function Support Vector Machine Specification (regression)
+## 
+## Computational engine: liquidSVM 
+## 
+## Model fit template:
+## liquidSVM::svm(x = missing_arg(), y = missing_arg(), folds = 1, 
+##     threads = 0)

svm_rbf() %>%
+  set_engine("liquidSVM") %>%
+  set_mode("classification") %>%
+  translate()

## Radial Basis Function Support Vector Machine Specification (classification)
+## 
+## Computational engine: liquidSVM 
+## 
+## Model fit template:
+## liquidSVM::svm(x = missing_arg(), y = missing_arg(), folds = 1, 
+##     threads = 0)

Note that models created using the liquidSVM engine cannot be saved like conventional R objects. The fit slot of the model_fit object

diff --git a/docs/dev/reference/tidy.nullmodel.html b/docs/dev/reference/tidy.nullmodel.html
index ddc40e72a..80b406d17 100644
--- a/docs/dev/reference/tidy.nullmodel.html
+++ b/docs/dev/reference/tidy.nullmodel.html
@@ -164,14 +164,14 @@

Value

A tibble with column value.

Examples

nullmodel(iris[,-5], iris$Species) %>% tidy()
#> # A tibble: 1 x 1
#>   value 
#>   <chr> 
#> 1 setosa

nullmodel(mtcars[,-1], mtcars$mpg) %>% tidy()
#> # A tibble: 1 x 1
#>   value
#>   <dbl>
#> 1  20.1