Add .paropts settings to all **ply.

Closes #84.
1 parent 5fcf0bb commit a3c8618b5f8d50ac688a83e817777c8276dedcc6 @hadley committed Oct 11, 2012
Showing with 206 additions and 61 deletions.
  1. +2 −0 NEWS
  2. +4 −2 R/a_ply.r
  3. +4 −2 R/aaply.r
  4. +2 −2 R/adply.r
  5. +2 −2 R/alply.r
  6. +4 −2 R/d_ply.r
  7. +3 −2 R/daply.r
  8. +2 −2 R/ddply.r
  9. +2 −2 R/dlply.r
  10. +7 −3 R/l_ply.r
  11. +2 −2 R/laply.r
  12. +2 −2 R/ldply.r
  13. +4 −7 R/llply.r
  14. +3 −2 R/m_ply.r
  15. +2 −2 R/maply.r
  16. +3 −2 R/mdply.r
  17. +3 −2 R/mlply.r
  18. +6 −0 man-roxygen/ply.r
  19. +10 −1 man/a_ply.Rd
  20. +10 −1 man/aaply.Rd
  21. +9 −1 man/adply.Rd
  22. +9 −1 man/alply.Rd
  23. +9 −1 man/d_ply.Rd
  24. +9 −1 man/daply.Rd
  25. +10 −1 man/ddply.Rd
  26. +10 −1 man/dlply.Rd
  27. +9 −1 man/l_ply.Rd
  28. +9 −1 man/laply.Rd
  29. +9 −1 man/ldply.Rd
  30. +8 −8 man/llply.Rd
  31. +10 −1 man/m_ply.Rd
  32. +10 −1 man/maply.Rd
  33. +9 −1 man/mdply.Rd
  34. +9 −1 man/mlply.Rd
2 NEWS
@@ -1,6 +1,8 @@
Version 1.7.1.99
------------------------------------------------------------------------------
+* `**ply` gains a `.paropts` argument, a list of options passed on to `foreach` for controlling parallel computation.
+
* New function `here` makes it possible to use `**ply` + a function that uses non-standard evaluation (e.g. `summarise`, `mutate`, `subset`, `arrange`) inside a function. (Thanks to Peter Meilstrup, #3)
* The subsetting in `d*ply` has been considerably optimised: this will have a small impact unless you have a very large number of groups, in which case it will be considerably faster.
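
For context only (not part of the diff): a minimal usage sketch of the new argument, assuming the doParallel backend is installed; the object names and values below are made up for illustration.

    library(plyr)
    library(doParallel)

    registerDoParallel(cores = 2)

    scale_factor <- 10

    # .paropts is handed straight to foreach(); following the advice in the
    # docs, .export ships the local object scale_factor to the workers
    res <- ldply(1:4,
                 function(i) data.frame(i = i, x = i * scale_factor),
                 .parallel = TRUE,
                 .paropts = list(.export = "scale_factor"))
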
6 R/a_ply.r
@@ -7,9 +7,11 @@
#' @template -_
#' @export
a_ply <- function(.data, .margins, .fun = NULL, ..., .expand = TRUE,
- .progress = "none", .print = FALSE, .parallel = FALSE) {
+ .progress = "none", .print = FALSE, .parallel = FALSE,
+ .paropts = NULL) {
pieces <- splitter_a(.data, .margins, .expand)
l_ply(.data = pieces, .fun = .fun, ...,
- .progress = .progress, .print = .print, .parallel = .parallel)
+ .progress = .progress, .print = .print, .parallel = .parallel,
+ .paropts = .paropts)
}
6 R/aaply.r
@@ -31,9 +31,11 @@
#'
#' aaply(ozone, 1:2, diff)
aaply <- function(.data, .margins, .fun = NULL, ..., .expand = TRUE,
- .progress = "none", .drop = TRUE, .parallel = FALSE) {
+ .progress = "none", .drop = TRUE, .parallel = FALSE,
+ .paropts = NULL) {
pieces <- splitter_a(.data, .margins, .expand)
laply(.data = pieces, .fun = .fun, ...,
- .progress = .progress, .drop = .drop, .parallel = .parallel)
+ .progress = .progress, .drop = .drop, .parallel = .parallel,
+ .paropts = .paropts)
}
4 R/adply.r
@@ -8,9 +8,9 @@
#' @template -d
#' @export
adply <- function(.data, .margins, .fun = NULL, ..., .expand = TRUE,
- .progress = "none", .parallel = FALSE) {
+ .progress = "none", .parallel = FALSE, .paropts = NULL) {
pieces <- splitter_a(.data, .margins, .expand)
ldply(.data = pieces, .fun = .fun, ...,
- .progress = .progress, .parallel = .parallel)
+ .progress = .progress, .parallel = .parallel, .paropts = .paropts)
}
4 R/alply.r
@@ -14,9 +14,9 @@
#' alply(ozone, 3, quantile)
#' alply(ozone, 3, function(x) table(round(x)))
alply <- function(.data, .margins, .fun = NULL, ..., .expand = TRUE,
- .progress = "none", .parallel = FALSE) {
+ .progress = "none", .parallel = FALSE, .paropts = NULL) {
pieces <- splitter_a(.data, .margins, .expand)
llply(.data = pieces, .fun = .fun, ...,
- .progress = .progress, .parallel = .parallel)
+ .progress = .progress, .parallel = .parallel, .paropts = .paropts)
}
6 R/d_ply.r
@@ -7,10 +7,12 @@
#' @template -_
#' @export
d_ply <- function(.data, .variables, .fun = NULL, ..., .progress = "none",
- .drop = TRUE, .print = FALSE, .parallel = FALSE) {
+ .drop = TRUE, .print = FALSE, .parallel = FALSE,
+ .paropts = NULL) {
.variables <- as.quoted(.variables)
pieces <- splitter_d(.data, .variables, .drop = .drop)
l_ply(.data = pieces, .fun = .fun, ...,
- .progress = .progress, .print = .print, .parallel = .parallel)
+ .progress = .progress, .print = .print, .parallel = .parallel,
+ .paropts = .paropts)
}
5 R/daply.r
@@ -32,10 +32,11 @@
#' daply(baseball[, 6:9], .(baseball$year), colwise(mean))
#' daply(baseball, .(year), function(df) colwise(mean)(df[, 6:9]))
daply <- function(.data, .variables, .fun = NULL, ..., .progress = "none",
- .drop_i = TRUE, .drop_o = TRUE, .parallel = FALSE) {
+ .drop_i = TRUE, .drop_o = TRUE, .parallel = FALSE, .paropts = NULL) {
.variables <- as.quoted(.variables)
pieces <- splitter_d(.data, .variables, drop = .drop_i)
laply(.data = pieces, .fun = .fun, ...,
- .progress = .progress, .drop = .drop_o, .parallel = .parallel)
+ .progress = .progress, .drop = .drop_o, .parallel = .parallel,
+ .paropts = .paropts)
}
4 R/ddply.r
@@ -47,11 +47,11 @@
#' career_year = year - min(year) + 1
#' )
ddply <- function(.data, .variables, .fun = NULL, ..., .progress = "none",
- .drop = TRUE, .parallel = FALSE) {
+ .drop = TRUE, .parallel = FALSE, .paropts = NULL) {
if (empty(.data)) return(.data)
.variables <- as.quoted(.variables)
pieces <- splitter_d(.data, .variables, drop = .drop)
ldply(.data = pieces, .fun = .fun, ...,
- .progress = .progress, .parallel = .parallel)
+ .progress = .progress, .parallel = .parallel, .paropts = .paropts)
}
4 R/dlply.r
@@ -20,10 +20,10 @@
#' qual <- laply(models, function(mod) summary(mod)$r.squared)
#' hist(qual)
dlply <- function(.data, .variables, .fun = NULL, ..., .progress = "none",
- .drop = TRUE, .parallel = FALSE) {
+ .drop = TRUE, .parallel = FALSE, .paropts = NULL) {
.variables <- as.quoted(.variables)
pieces <- splitter_d(.data, .variables, drop = .drop)
llply(.data = pieces, .fun = .fun, ...,
- .progress = .progress, .parallel = .parallel)
+ .progress = .progress, .parallel = .parallel, .paropts = .paropts)
}
10 R/l_ply.r
@@ -7,7 +7,7 @@
#' @template -_
#' @export
l_ply <- function(.data, .fun = NULL, ..., .progress = "none", .print = FALSE,
- .parallel = FALSE) {
+ .parallel = FALSE, .paropts = NULL) {
if (is.character(.fun) || is.list(.fun)) .fun <- each(.fun)
if (!is.function(.fun)) stop(".fun is not a function.")
@@ -20,8 +20,12 @@ l_ply <- function(.data, .fun = NULL, ..., .progress = "none", .print = FALSE,
if (.progress != "none") message("Progress disabled for parallel processing")
setup_parallel()
- ignore <- function(...) NULL
- foreach(d = .data, .combine = ignore) %dopar% .fun(d, ...)
+ .paropts$.combine <- function(...) NULL
+ fe_call <- as.call(c(list(as.name("foreach"), d = as.name(".data")),
+ .paropts))
+ fe <- eval(fe_call)
+
+ fe %dopar% .fun(d, ...)
} else {
.data <- as.list(.data)
for(i in seq_along(.data)) {
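
Outside the diff, a standalone sketch of what this parallel branch assembles; the .paropts value and data below are illustrative only.

    library(foreach)

    .data <- as.list(1:3)
    .paropts <- list(.packages = "stats")   # whatever the caller supplied

    # l_ply runs .fun only for its side effects, so .combine is forced to a
    # function that discards results (a user-supplied .combine in .paropts
    # would be overwritten here)
    .paropts$.combine <- function(...) NULL

    fe_call <- as.call(c(list(as.name("foreach"), d = as.name(".data")), .paropts))
    fe <- eval(fe_call)

    # with no parallel backend registered, %dopar% falls back to sequential
    # execution (with a warning), so the sketch runs as-is
    fe %dopar% print(d)
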
4 R/laply.r
@@ -22,13 +22,13 @@
#' laply(seq_len(10), rep, times = 4)
#' laply(seq_len(10), matrix, nrow = 2, ncol = 2)
laply <- function(.data, .fun = NULL, ..., .progress = "none", .drop = TRUE,
- .parallel = FALSE) {
+ .parallel = FALSE, .paropts = NULL) {
if (is.character(.fun)) .fun <- do.call("each", as.list(.fun))
if (!is.function(.fun)) stop(".fun is not a function.")
if (!inherits(.data, "split")) .data <- as.list(.data)
res <- llply(.data = .data, .fun = .fun, ...,
- .progress = .progress, .parallel = .parallel)
+ .progress = .progress, .parallel = .parallel, .paropts = .paropts)
list_to_array(res, attr(.data, "split_labels"), .drop)
}
4 R/ldply.r
@@ -8,10 +8,10 @@
#' @template -d
#' @export
ldply <- function(.data, .fun = NULL, ..., .progress = "none",
- .parallel = FALSE) {
+ .parallel = FALSE, .paropts = NULL) {
if (!inherits(.data, "split")) .data <- as.list(.data)
res <- llply(.data = .data, .fun = .fun, ...,
- .progress = .progress, .parallel = .parallel)
+ .progress = .progress, .parallel = .parallel, .paropts = .paropts)
list_to_dataframe(res, attr(.data, "split_labels"))
}
11 R/llply.r
@@ -11,12 +11,6 @@
#' @param .inform produce informative error messages? This is turned off by
#' by default because it substantially slows processing speed, but is very
#' useful for debugging
-#' @param .paropts a list of additional options passed into
-#' the \code{\link[foreach]{foreach}} function when parallel computation
-#' is enabled. This is important if (for example) your code relies on
-#' external data or packages: use the \code{.export} and \code{.packages}
-#' arguments to supply them so that all cluster nodes have the correct
-#' environment set up for computing.
#' @export
#' @examples
#' llply(llply(mtcars, round), table)
@@ -76,7 +70,10 @@ llply <- function(.data, .fun = NULL, ..., .progress = "none", .inform = FALSE,
}
if (.parallel) {
setup_parallel()
- fe <- parallel_fe(n, .paropts)
+
+ i <- seq_len(n)
+ fe_call <- as.call(c(list(as.name("foreach"), i = i), .paropts))
+ fe <- eval(fe_call)
result <- fe %dopar% do.ply(i)
} else {
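
The same construction as it plays out in llply, again shown standalone; do.ply() is a stand-in for plyr's internal per-piece worker and the other names are hypothetical.

    library(foreach)

    n <- 4
    offset <- 100
    .paropts <- list(.export = "offset")

    do.ply <- function(i) i + offset   # stand-in for plyr's per-piece worker

    fe_call <- as.call(c(list(as.name("foreach"), i = seq_len(n)), .paropts))
    fe <- eval(fe_call)

    # unlike l_ply, no .combine is forced here, so foreach collects the
    # per-iteration results into a list, which is what llply expects back
    result <- fe %dopar% do.ply(i)
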
5 R/m_ply.r
@@ -8,10 +8,11 @@
#' @template -_
#' @export
m_ply <- function(.data, .fun = NULL, ..., .expand = TRUE, .progress = "none",
- .print = FALSE, .parallel = FALSE ) {
+ .print = FALSE, .parallel = FALSE, .paropts = NULL) {
if (is.matrix(.data) & !is.list(.data)) .data <- .matrix_to_df(.data)
f <- splat(.fun)
a_ply(.data = .data, .margins = 1, .fun = f, ..., .expand = .expand,
- .progress = .progress, .print = .print, .parallel = .parallel)
+ .progress = .progress, .print = .print, .parallel = .parallel,
+ .paropts = .paropts)
}
4 R/maply.r
@@ -12,11 +12,11 @@
#' maply(expand.grid(mean = 1:5, sd = 1:5), rnorm, n = 5)
#' maply(cbind(1:5, 1:5), rnorm, n = 5)
maply <- function(.data, .fun = NULL, ..., .expand = TRUE, .progress = "none",
- .drop = TRUE, .parallel = FALSE) {
+ .drop = TRUE, .parallel = FALSE, .paropts = NULL) {
if (is.matrix(.data) & !is.list(.data)) .data <- .matrix_to_df(.data)
f <- splat(.fun)
aaply(.data = .data, .margins = 1, .fun = f, ...,
.expand = .expand, .progress = .progress, .parallel = .parallel,
- .drop = drop)
+ .paropts = .paropts, .drop = .drop)
}
5 R/mdply.r
@@ -13,10 +13,11 @@
#' mdply(cbind(mean = 1:5, sd = 1:5), rnorm, n = 5)
#' mdply(cbind(mean = 1:5, sd = 1:5), as.data.frame(rnorm), n = 5)
mdply <- function(.data, .fun = NULL, ..., .expand = TRUE, .progress = "none",
- .parallel = FALSE) {
+ .parallel = FALSE, .paropts = NULL) {
if (is.matrix(.data) & !is.list(.data)) .data <- .matrix_to_df(.data)
f <- splat(.fun)
adply(.data = .data, .margins = 1, .fun = f, ...,
- .expand = .expand, .progress = .progress, .parallel = .parallel)
+ .expand = .expand, .progress = .progress, .parallel = .parallel,
+ .paropts = .paropts)
}
5 R/mlply.r
@@ -15,10 +15,11 @@
#' mlply(cbind(1:4, length = 4:1), seq)
#' mlply(cbind(1:4, by = 4:1), seq, to = 20)
mlply <- function(.data, .fun = NULL, ..., .expand = TRUE, .progress = "none",
- .parallel = FALSE) {
+ .parallel = FALSE, .paropts = NULL) {
if (is.matrix(.data) & !is.list(.data)) .data <- .matrix_to_df(.data)
f <- splat(.fun)
alply(.data = .data, .margins = 1, .fun = f, ...,
- .expand = .expand, .progress = .progress, .parallel = .parallel)
+ .expand = .expand, .progress = .progress, .parallel = .parallel,
+ .paropts = .paropts)
}
6 man-roxygen/ply.r
@@ -2,6 +2,12 @@
#' @param ... other arguments passed on to \code{.fun}
#' @param .progress name of the progress bar to use, see
#' \code{\link{create_progress_bar}}
+#' @param .paropts a list of additional options passed into
+#' the \code{\link[foreach]{foreach}} function when parallel computation
+#' is enabled. This is important if (for example) your code relies on
+#' external data or packages: use the \code{.export} and \code{.packages}
+#' arguments to supply them so that all cluster nodes have the correct
+#' environment set up for computing.
#' @param .parallel if \code{TRUE}, apply function in parallel, using parallel
#' backend provided by foreach
#' @keywords manip
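
To illustrate why .packages matters (example not from the commit; it assumes doParallel and stringr are installed): PSOCK workers start as fresh R sessions, so a package used inside .fun has to be loaded on each worker.

    library(plyr)
    library(doParallel)

    cl <- makeCluster(2)      # fresh R sessions: no extra packages attached
    registerDoParallel(cl)

    # str_pad() comes from stringr; listing it in .packages attaches the
    # package on every worker before .fun is called
    out <- llply(letters[1:4],
                 function(x) str_pad(x, width = 3, pad = "-"),
                 .parallel = TRUE,
                 .paropts = list(.packages = "stringr"))

    stopCluster(cl)
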
11 man/a_ply.Rd
@@ -3,7 +3,8 @@
\title{Split array, apply function, and discard results.}
\usage{
a_ply(.data, .margins, .fun = NULL, ..., .expand = TRUE,
- .progress = "none", .print = FALSE, .parallel = FALSE)
+ .progress = "none", .print = FALSE, .parallel = FALSE,
+ .paropts = NULL)
}
\arguments{
\item{.fun}{function to apply to each piece}
@@ -13,6 +14,14 @@
\item{.progress}{name of the progress bar to use, see
\code{\link{create_progress_bar}}}
+ \item{.paropts}{a list of additional options passed into
+ the \code{\link[foreach]{foreach}} function when parallel
+ computation is enabled. This is important if (for
+ example) your code relies on external data or packages:
+ use the \code{.export} and \code{.packages} arguments to
+ supply them so that all cluster nodes have the correct
+ environment set up for computing.}
+
\item{.parallel}{if \code{TRUE}, apply function in
parallel, using parallel backend provided by foreach}
11 man/aaply.Rd
@@ -3,7 +3,8 @@
\title{Split array, apply function, and return results in an array.}
\usage{
aaply(.data, .margins, .fun = NULL, ..., .expand = TRUE,
- .progress = "none", .drop = TRUE, .parallel = FALSE)
+ .progress = "none", .drop = TRUE, .parallel = FALSE,
+ .paropts = NULL)
}
\arguments{
\item{.fun}{function to apply to each piece}
@@ -13,6 +14,14 @@
\item{.progress}{name of the progress bar to use, see
\code{\link{create_progress_bar}}}
+ \item{.paropts}{a list of additional options passed into
+ the \code{\link[foreach]{foreach}} function when parallel
+ computation is enabled. This is important if (for
+ example) your code relies on external data or packages:
+ use the \code{.export} and \code{.packages} arguments to
+ supply them so that all cluster nodes have the correct
+ environment set up for computing.}
+
\item{.parallel}{if \code{TRUE}, apply function in
parallel, using parallel backend provided by foreach}
10 man/adply.Rd
@@ -3,7 +3,7 @@
\title{Split array, apply function, and return results in a data frame.}
\usage{
adply(.data, .margins, .fun = NULL, ..., .expand = TRUE,
- .progress = "none", .parallel = FALSE)
+ .progress = "none", .parallel = FALSE, .paropts = NULL)
}
\arguments{
\item{.fun}{function to apply to each piece}
@@ -13,6 +13,14 @@
\item{.progress}{name of the progress bar to use, see
\code{\link{create_progress_bar}}}
+ \item{.paropts}{a list of additional options passed into
+ the \code{\link[foreach]{foreach}} function when parallel
+ computation is enabled. This is important if (for
+ example) your code relies on external data or packages:
+ use the \code{.export} and \code{.packages} arguments to
+ supply them so that all cluster nodes have the correct
+ environment set up for computing.}
+
\item{.parallel}{if \code{TRUE}, apply function in
parallel, using parallel backend provided by foreach}
10 man/alply.Rd
@@ -3,7 +3,7 @@
\title{Split array, apply function, and return results in a list.}
\usage{
alply(.data, .margins, .fun = NULL, ..., .expand = TRUE,
- .progress = "none", .parallel = FALSE)
+ .progress = "none", .parallel = FALSE, .paropts = NULL)
}
\arguments{
\item{.fun}{function to apply to each piece}
@@ -13,6 +13,14 @@
\item{.progress}{name of the progress bar to use, see
\code{\link{create_progress_bar}}}
+ \item{.paropts}{a list of additional options passed into
+ the \code{\link[foreach]{foreach}} function when parallel
+ computation is enabled. This is important if (for
+ example) your code relies on external data or packages:
+ use the \code{.export} and \code{.packages} arguments to
+ supply them so that all cluster nodes have the correct
+ environment set up for computing.}
+
\item{.parallel}{if \code{TRUE}, apply function in
parallel, using parallel backend provided by foreach}
10 man/d_ply.Rd
@@ -4,7 +4,7 @@
\usage{
d_ply(.data, .variables, .fun = NULL, ...,
.progress = "none", .drop = TRUE, .print = FALSE,
- .parallel = FALSE)
+ .parallel = FALSE, .paropts = NULL)
}
\arguments{
\item{.fun}{function to apply to each piece}
@@ -14,6 +14,14 @@
\item{.progress}{name of the progress bar to use, see
\code{\link{create_progress_bar}}}
+ \item{.paropts}{a list of additional options passed into
+ the \code{\link[foreach]{foreach}} function when parallel
+ computation is enabled. This is important if (for
+ example) your code relies on external data or packages:
+ use the \code{.export} and \code{.packages} arguments to
+ supply them so that all cluster nodes have the correct
+ environment set up for computing.}
+
\item{.parallel}{if \code{TRUE}, apply function in
parallel, using parallel backend provided by foreach}
10 man/daply.Rd
@@ -4,7 +4,7 @@
\usage{
daply(.data, .variables, .fun = NULL, ...,
.progress = "none", .drop_i = TRUE, .drop_o = TRUE,
- .parallel = FALSE)
+ .parallel = FALSE, .paropts = NULL)
}
\arguments{
\item{.fun}{function to apply to each piece}
@@ -14,6 +14,14 @@
\item{.progress}{name of the progress bar to use, see
\code{\link{create_progress_bar}}}
+ \item{.paropts}{a list of additional options passed into
+ the \code{\link[foreach]{foreach}} function when parallel
+ computation is enabled. This is important if (for
+ example) your code relies on external data or packages:
+ use the \code{.export} and \code{.packages} arguments to
+ supply them so that all cluster nodes have the correct
+ environment set up for computing.}
+
\item{.parallel}{if \code{TRUE}, apply function in
parallel, using parallel backend provided by foreach}
11 man/ddply.Rd
@@ -3,7 +3,8 @@
\title{Split data frame, apply function, and return results in a data frame.}
\usage{
ddply(.data, .variables, .fun = NULL, ...,
- .progress = "none", .drop = TRUE, .parallel = FALSE)
+ .progress = "none", .drop = TRUE, .parallel = FALSE,
+ .paropts = NULL)
}
\arguments{
\item{.fun}{function to apply to each piece}
@@ -13,6 +14,14 @@
\item{.progress}{name of the progress bar to use, see
\code{\link{create_progress_bar}}}
+ \item{.paropts}{a list of additional options passed into
+ the \code{\link[foreach]{foreach}} function when parallel
+ computation is enabled. This is important if (for
+ example) your code relies on external data or packages:
+ use the \code{.export} and \code{.packages} arguments to
+ supply them so that all cluster nodes have the correct
+ environment set up for computing.}
+
\item{.parallel}{if \code{TRUE}, apply function in
parallel, using parallel backend provided by foreach}
11 man/dlply.Rd
@@ -3,7 +3,8 @@
\title{Split data frame, apply function, and return results in a list.}
\usage{
dlply(.data, .variables, .fun = NULL, ...,
- .progress = "none", .drop = TRUE, .parallel = FALSE)
+ .progress = "none", .drop = TRUE, .parallel = FALSE,
+ .paropts = NULL)
}
\arguments{
\item{.fun}{function to apply to each piece}
@@ -13,6 +14,14 @@
\item{.progress}{name of the progress bar to use, see
\code{\link{create_progress_bar}}}
+ \item{.paropts}{a list of additional options passed into
+ the \code{\link[foreach]{foreach}} function when parallel
+ computation is enabled. This is important if (for
+ example) your code relies on external data or packages:
+ use the \code{.export} and \code{.packages} arguments to
+ supply them so that all cluster nodes have the correct
+ environment set up for computing.}
+
\item{.parallel}{if \code{TRUE}, apply function in
parallel, using parallel backend provided by foreach}
10 man/l_ply.Rd
@@ -3,7 +3,7 @@
\title{Split list, apply function, and discard results.}
\usage{
l_ply(.data, .fun = NULL, ..., .progress = "none",
- .print = FALSE, .parallel = FALSE)
+ .print = FALSE, .parallel = FALSE, .paropts = NULL)
}
\arguments{
\item{.fun}{function to apply to each piece}
@@ -13,6 +13,14 @@
\item{.progress}{name of the progress bar to use, see
\code{\link{create_progress_bar}}}
+ \item{.paropts}{a list of additional options passed into
+ the \code{\link[foreach]{foreach}} function when parallel
+ computation is enabled. This is important if (for
+ example) your code relies on external data or packages:
+ use the \code{.export} and \code{.packages} arguments to
+ supply them so that all cluster nodes have the correct
+ environment set up for computing.}
+
\item{.parallel}{if \code{TRUE}, apply function in
parallel, using parallel backend provided by foreach}
10 man/laply.Rd
@@ -3,7 +3,7 @@
\title{Split list, apply function, and return results in an array.}
\usage{
laply(.data, .fun = NULL, ..., .progress = "none",
- .drop = TRUE, .parallel = FALSE)
+ .drop = TRUE, .parallel = FALSE, .paropts = NULL)
}
\arguments{
\item{.fun}{function to apply to each piece}
@@ -13,6 +13,14 @@
\item{.progress}{name of the progress bar to use, see
\code{\link{create_progress_bar}}}
+ \item{.paropts}{a list of additional options passed into
+ the \code{\link[foreach]{foreach}} function when parallel
+ computation is enabled. This is important if (for
+ example) your code relies on external data or packages:
+ use the \code{.export} and \code{.packages} arguments to
+ supply them so that all cluster nodes have the correct
+ environment set up for computing.}
+
\item{.parallel}{if \code{TRUE}, apply function in
parallel, using parallel backend provided by foreach}
10 man/ldply.Rd
@@ -3,7 +3,7 @@
\title{Split list, apply function, and return results in a data frame.}
\usage{
ldply(.data, .fun = NULL, ..., .progress = "none",
- .parallel = FALSE)
+ .parallel = FALSE, .paropts = NULL)
}
\arguments{
\item{.fun}{function to apply to each piece}
@@ -13,6 +13,14 @@
\item{.progress}{name of the progress bar to use, see
\code{\link{create_progress_bar}}}
+ \item{.paropts}{a list of additional options passed into
+ the \code{\link[foreach]{foreach}} function when parallel
+ computation is enabled. This is important if (for
+ example) your code relies on external data or packages:
+ use the \code{.export} and \code{.packages} arguments to
+ supply them so that all cluster nodes have the correct
+ environment set up for computing.}
+
\item{.parallel}{if \code{TRUE}, apply function in
parallel, using parallel backend provided by foreach}
16 man/llply.Rd
@@ -13,6 +13,14 @@
\item{.progress}{name of the progress bar to use, see
\code{\link{create_progress_bar}}}
+ \item{.paropts}{a list of additional options passed into
+ the \code{\link[foreach]{foreach}} function when parallel
+ computation is enabled. This is important if (for
+ example) your code relies on external data or packages:
+ use the \code{.export} and \code{.packages} arguments to
+ supply them so that all cluster nodes have the correct
+ environment set up for computing.}
+
\item{.parallel}{if \code{TRUE}, apply function in
parallel, using parallel backend provided by foreach}
@@ -21,14 +29,6 @@
\item{.inform}{produce informative error messages? This
is turned off by default because it substantially
slows processing speed, but is very useful for debugging}
-
- \item{.paropts}{a list of additional options passed into
- the \code{\link[foreach]{foreach}} function when parallel
- computation is enabled. This is important if (for
- example) your code relies on external data or packages:
- use the \code{.export} and \code{.packages} arguments to
- supply them so that all cluster nodes have the correct
- environment set up for computing.}
}
\value{
list of results
11 man/m_ply.Rd
@@ -3,7 +3,8 @@
\title{Call function with arguments in array or data frame, discarding results.}
\usage{
m_ply(.data, .fun = NULL, ..., .expand = TRUE,
- .progress = "none", .print = FALSE, .parallel = FALSE)
+ .progress = "none", .print = FALSE, .parallel = FALSE,
+ .paropts = NULL)
}
\arguments{
\item{.fun}{function to apply to each piece}
@@ -13,6 +14,14 @@
\item{.progress}{name of the progress bar to use, see
\code{\link{create_progress_bar}}}
+ \item{.paropts}{a list of additional options passed into
+ the \code{\link[foreach]{foreach}} function when parallel
+ computation is enabled. This is important if (for
+ example) your code relies on external data or packages:
+ use the \code{.export} and \code{.packages} arguments to
+ supply them so that all cluster nodes have the correct
+ environment set up for computing.}
+
\item{.parallel}{if \code{TRUE}, apply function in
parallel, using parallel backend provided by foreach}
11 man/maply.Rd
@@ -3,7 +3,8 @@
\title{Call function with arguments in array or data frame, returning an array.}
\usage{
maply(.data, .fun = NULL, ..., .expand = TRUE,
- .progress = "none", .drop = TRUE, .parallel = FALSE)
+ .progress = "none", .drop = TRUE, .parallel = FALSE,
+ .paropts = NULL)
}
\arguments{
\item{.fun}{function to apply to each piece}
@@ -13,6 +14,14 @@
\item{.progress}{name of the progress bar to use, see
\code{\link{create_progress_bar}}}
+ \item{.paropts}{a list of additional options passed into
+ the \code{\link[foreach]{foreach}} function when parallel
+ computation is enabled. This is important if (for
+ example) your code relies on external data or packages:
+ use the \code{.export} and \code{.packages} arguments to
+ supply them so that all cluster nodes have the correct
+ environment set up for computing.}
+
\item{.parallel}{if \code{TRUE}, apply function in
parallel, using parallel backend provided by foreach}
10 man/mdply.Rd
@@ -3,7 +3,7 @@
\title{Call function with arguments in array or data frame, returning a data frame.}
\usage{
mdply(.data, .fun = NULL, ..., .expand = TRUE,
- .progress = "none", .parallel = FALSE)
+ .progress = "none", .parallel = FALSE, .paropts = NULL)
}
\arguments{
\item{.fun}{function to apply to each piece}
@@ -13,6 +13,14 @@
\item{.progress}{name of the progress bar to use, see
\code{\link{create_progress_bar}}}
+ \item{.paropts}{a list of additional options passed into
+ the \code{\link[foreach]{foreach}} function when parallel
+ computation is enabled. This is important if (for
+ example) your code relies on external data or packages:
+ use the \code{.export} and \code{.packages} arguments to
+ supply them so that all cluster nodes have the correct
+ environment set up for computing.}
+
\item{.parallel}{if \code{TRUE}, apply function in
parallel, using parallel backend provided by foreach}
10 man/mlply.Rd
@@ -3,7 +3,7 @@
\title{Call function with arguments in array or data frame, returning a list.}
\usage{
mlply(.data, .fun = NULL, ..., .expand = TRUE,
- .progress = "none", .parallel = FALSE)
+ .progress = "none", .parallel = FALSE, .paropts = NULL)
}
\arguments{
\item{.fun}{function to apply to each piece}
@@ -13,6 +13,14 @@
\item{.progress}{name of the progress bar to use, see
\code{\link{create_progress_bar}}}
+ \item{.paropts}{a list of additional options passed into
+ the \code{\link[foreach]{foreach}} function when parallel
+ computation is enabled. This is important if (for
+ example) your code relies on external data or packages:
+ use the \code{.export} and \code{.packages} arguments to
+ supply them so that all cluster nodes have the correct
+ environment set up for computing.}
+
\item{.parallel}{if \code{TRUE}, apply function in
parallel, using parallel backend provided by foreach}
