Add .paropts settings to all **ply.

Closes #84.
commit a3c8618b5f8d50ac688a83e817777c8276dedcc6 1 parent 5fcf0bb
Hadley Wickham authored October 11, 2012
2  NEWS
@@ -1,6 +1,8 @@
 Version 1.7.1.99
 ------------------------------------------------------------------------------
 
+* `**ply` gains a `.paropts` argument, a list of options that is passed onto `foreach` for controlling parallel computation.
+
 * New function `here` makes it possible to use `**ply` + a function that uses non-standard evaluation (e.g. `summarise`, `mutate`, `subset`, `arrange`) inside a function.  (Thanks to Peter Meilstrup, #3)
 
 * The subsetting in `d*ply` has been considerably optimised: this will have a small impact unless you have a very large number of groups, in which case it will be considerably faster.
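
A minimal usage sketch of the new argument (not from the commit itself; it assumes the foreach and doParallel packages are available and a parallel backend has been registered, and the use of MASS is purely illustrative):

    library(plyr)
    library(doParallel)
    registerDoParallel(cores = 2)   # register a %dopar% backend

    # Workers need MASS attached, so forward it through .paropts
    res <- llply(1:4,
      function(k) mvrnorm(n = 2, mu = rep(0, k), Sigma = diag(k)),
      .parallel = TRUE,
      .paropts = list(.packages = "MASS"))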
6  R/a_ply.r
@@ -7,9 +7,11 @@
 #' @template -_
 #' @export
 a_ply <- function(.data, .margins, .fun = NULL, ..., .expand = TRUE,
-                  .progress = "none", .print = FALSE, .parallel = FALSE) {
+                  .progress = "none", .print = FALSE, .parallel = FALSE,
+                  .paropts = NULL) {
   pieces <- splitter_a(.data, .margins, .expand)
 
   l_ply(.data = pieces, .fun = .fun, ...,
-    .progress = .progress, .print = .print, .parallel = .parallel)
+    .progress = .progress, .print = .print, .parallel = .parallel,
+    .paropts = .paropts)
 }
6  R/aaply.r
@@ -31,9 +31,11 @@
 #'
 #' aaply(ozone, 1:2, diff)
 aaply <- function(.data, .margins, .fun = NULL, ..., .expand = TRUE,
-                  .progress = "none", .drop = TRUE, .parallel = FALSE) {
+                  .progress = "none", .drop = TRUE, .parallel = FALSE,
+                  .paropts = NULL) {
   pieces <- splitter_a(.data, .margins, .expand)
 
   laply(.data = pieces, .fun = .fun, ...,
-    .progress = .progress, .drop = .drop, .parallel = .parallel)
+    .progress = .progress, .drop = .drop, .parallel = .parallel,
+    .paropts = .paropts)
 }
4  R/adply.r
@@ -8,9 +8,9 @@
 #' @template -d
 #' @export
 adply <- function(.data, .margins, .fun = NULL, ..., .expand = TRUE,
-                  .progress = "none", .parallel = FALSE) {
+                  .progress = "none", .parallel = FALSE, .paropts = NULL) {
   pieces <- splitter_a(.data, .margins, .expand)
 
   ldply(.data = pieces, .fun = .fun, ...,
-    .progress = .progress, .parallel = .parallel)
+    .progress = .progress, .parallel = .parallel, .paropts = .paropts)
 }
4  R/alply.r
@@ -14,9 +14,9 @@
 #' alply(ozone, 3, quantile)
 #' alply(ozone, 3, function(x) table(round(x)))
 alply <- function(.data, .margins, .fun = NULL, ..., .expand = TRUE,
-                  .progress = "none", .parallel = FALSE) {
+                  .progress = "none", .parallel = FALSE, .paropts = NULL) {
   pieces <- splitter_a(.data, .margins, .expand)
 
   llply(.data = pieces, .fun = .fun, ...,
-    .progress = .progress, .parallel = .parallel)
+    .progress = .progress, .parallel = .parallel, .paropts = .paropts)
 }
6  R/d_ply.r
@@ -7,10 +7,12 @@
 #' @template -_
 #' @export
 d_ply <- function(.data, .variables, .fun = NULL, ..., .progress = "none",
-                  .drop = TRUE, .print = FALSE, .parallel = FALSE) {
+                  .drop = TRUE, .print = FALSE, .parallel = FALSE,
+                  .paropts = NULL) {
   .variables <- as.quoted(.variables)
   pieces <- splitter_d(.data, .variables, .drop = .drop)
 
   l_ply(.data = pieces, .fun = .fun, ...,
-    .progress = .progress, .print = .print, .parallel = .parallel)
+    .progress = .progress, .print = .print, .parallel = .parallel,
+    .paropts = .paropts)
 }
5  R/daply.r
@@ -32,10 +32,11 @@
 #' daply(baseball[, 6:9], .(baseball$year), colwise(mean))
 #' daply(baseball, .(year), function(df) colwise(mean)(df[, 6:9]))
 daply <- function(.data, .variables, .fun = NULL, ..., .progress = "none",
-  .drop_i = TRUE, .drop_o = TRUE, .parallel = FALSE) {
+  .drop_i = TRUE, .drop_o = TRUE, .parallel = FALSE, .paropts = NULL) {
   .variables <- as.quoted(.variables)
   pieces <- splitter_d(.data, .variables, drop = .drop_i)
 
   laply(.data = pieces, .fun = .fun, ...,
-    .progress = .progress, .drop = .drop_o, .parallel = .parallel)
+    .progress = .progress, .drop = .drop_o, .parallel = .parallel,
+    .paropts = .paropts)
 }
4  R/ddply.r
@@ -47,11 +47,11 @@
 #'  career_year = year - min(year) + 1
 #' )
 ddply <- function(.data, .variables, .fun = NULL, ..., .progress = "none",
-                  .drop = TRUE, .parallel = FALSE) {
+                  .drop = TRUE, .parallel = FALSE, .paropts = NULL) {
   if (empty(.data)) return(.data)
   .variables <- as.quoted(.variables)
   pieces <- splitter_d(.data, .variables, drop = .drop)
 
   ldply(.data = pieces, .fun = .fun, ...,
-    .progress = .progress, .parallel = .parallel)
+    .progress = .progress, .parallel = .parallel, .paropts = .paropts)
 }
4  R/dlply.r
@@ -20,10 +20,10 @@
 #' qual <- laply(models, function(mod) summary(mod)$r.squared)
 #' hist(qual)
 dlply <- function(.data, .variables, .fun = NULL, ..., .progress = "none",
-                  .drop = TRUE, .parallel = FALSE) {
+                  .drop = TRUE, .parallel = FALSE, .paropts = NULL) {
   .variables <- as.quoted(.variables)
   pieces <- splitter_d(.data, .variables, drop = .drop)
 
   llply(.data = pieces, .fun = .fun, ...,
-    .progress = .progress, .parallel = .parallel)
+    .progress = .progress, .parallel = .parallel, .paropts = .paropts)
 }
10  R/l_ply.r
@@ -7,7 +7,7 @@
 #' @template -_
 #' @export
 l_ply <- function(.data, .fun = NULL, ..., .progress = "none", .print = FALSE,
-                  .parallel = FALSE) {
+                  .parallel = FALSE, .paropts = NULL) {
   if (is.character(.fun) || is.list(.fun)) .fun <- each(.fun)
   if (!is.function(.fun)) stop(".fun is not a function.")
 
@@ -20,8 +20,12 @@ l_ply <- function(.data, .fun = NULL, ..., .progress = "none", .print = FALSE,
     if (.progress != "none") message("Progress disabled for parallel processing")
 
     setup_parallel()
-    ignore <- function(...) NULL
-    foreach(d = .data, .combine = ignore) %dopar% .fun(d, ...)
+    .paropts$.combine <- function(...) NULL
+    fe_call <- as.call(c(list(as.name("foreach"), d = as.name(".data")),
+      .paropts))
+    fe <- eval(fe_call)
+
+    fe %dopar% .fun(d, ...)
   } else {
     .data <- as.list(.data)
     for(i in seq_along(.data)) {
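
The change above stops hard-coding the `foreach()` call and instead splices the user's `.paropts` into a constructed call before evaluating it. A standalone sketch of the same pattern (the data and options here are hypothetical, and a registered doParallel backend is assumed):

    library(foreach)
    library(doParallel)
    registerDoParallel(cores = 2)

    .data <- list(1:3, 4:6, 7:9)
    .paropts <- list(.packages = "stats")    # hypothetical user-supplied options

    # l_ply discards results, so force a .combine that returns NULL
    .paropts$.combine <- function(...) NULL

    # Build foreach(d = .data, <.paropts...>) as an unevaluated call, then evaluate it
    fe_call <- as.call(c(list(as.name("foreach"), d = as.name(".data")), .paropts))
    fe <- eval(fe_call)

    fe %dopar% sum(d)    # runs for side effects only; the combined result is NULL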
4  R/laply.r
@@ -22,13 +22,13 @@
 #' laply(seq_len(10), rep, times = 4)
 #' laply(seq_len(10), matrix, nrow = 2, ncol = 2)
 laply <-  function(.data, .fun = NULL, ..., .progress = "none", .drop = TRUE,
-                  .parallel = FALSE) {
+                  .parallel = FALSE, .paropts = NULL) {
   if (is.character(.fun)) .fun <- do.call("each", as.list(.fun))
   if (!is.function(.fun)) stop(".fun is not a function.")
 
   if (!inherits(.data, "split")) .data <- as.list(.data)
   res <- llply(.data = .data, .fun = .fun, ...,
-    .progress = .progress, .parallel = .parallel)
+    .progress = .progress, .parallel = .parallel, .paropts = .paropts)
 
   list_to_array(res, attr(.data, "split_labels"), .drop)
 }
4  R/ldply.r
@@ -8,10 +8,10 @@
 #' @template -d
 #' @export
 ldply <- function(.data, .fun = NULL, ..., .progress = "none",
-                  .parallel = FALSE) {
+                  .parallel = FALSE, .paropts = NULL) {
   if (!inherits(.data, "split")) .data <- as.list(.data)
   res <- llply(.data = .data, .fun = .fun, ...,
-    .progress = .progress, .parallel = .parallel)
+    .progress = .progress, .parallel = .parallel, .paropts = .paropts)
 
   list_to_dataframe(res, attr(.data, "split_labels"))
 }
11  R/llply.r
@@ -11,12 +11,6 @@
 #' @param .inform produce informative error messages?  This is turned off by
 #'   by default because it substantially slows processing speed, but is very
 #'   useful for debugging
-#' @param .paropts a list of additional options passed into
-#'   the \code{\link[foreach]{foreach}} function when parallel computation
-#'   is enabled.  This is important if (for example) your code relies on
-#'   external data or packages: use the \code{.export} and \code{.packages}
-#'   arguments to supply them so that all cluster nodes have the correct
-#'   environment set up for computing.
 #' @export
 #' @examples
 #' llply(llply(mtcars, round), table)
@@ -76,7 +70,10 @@ llply <- function(.data, .fun = NULL, ..., .progress = "none", .inform = FALSE,
   }
   if (.parallel) {
     setup_parallel()
-    fe <- parallel_fe(n, .paropts)
+
+    i <- seq_len(n)
+    fe_call <- as.call(c(list(as.name("foreach"), i = i), .paropts))
+    fe <- eval(fe_call)
 
     result <- fe %dopar% do.ply(i)
   } else {
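
`llply` builds the same kind of call, but iterates over indices and keeps the results. A standalone sketch under the same assumptions (`.inorder` is just an illustrative option forwarded to `foreach()`, not something the commit requires):

    library(foreach)
    library(doParallel)
    registerDoParallel(cores = 2)

    n <- 5
    .paropts <- list(.inorder = FALSE)   # illustrative pass-through option

    i <- seq_len(n)
    fe_call <- as.call(c(list(as.name("foreach"), i = i), .paropts))
    fe <- eval(fe_call)

    # Each worker receives one index, mirroring `result <- fe %dopar% do.ply(i)` above
    result <- fe %dopar% sqrt(i)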
5  R/m_ply.r
@@ -8,10 +8,11 @@
 #' @template -_
 #' @export
 m_ply <- function(.data, .fun = NULL, ..., .expand = TRUE, .progress = "none",
-                  .print = FALSE, .parallel = FALSE ) {
+                  .print = FALSE, .parallel = FALSE, .paropts = NULL) {
   if (is.matrix(.data) & !is.list(.data)) .data <- .matrix_to_df(.data)
 
   f <- splat(.fun)
   a_ply(.data = .data, .margins = 1, .fun = f, ..., .expand = .expand,
-    .progress = .progress, .print = .print, .parallel = .parallel)
+    .progress = .progress, .print = .print, .parallel = .parallel,
+    .paropts = .paropts)
 }
4  R/maply.r
@@ -12,11 +12,11 @@
 #' maply(expand.grid(mean = 1:5, sd = 1:5), rnorm, n = 5)
 #' maply(cbind(1:5, 1:5), rnorm, n = 5)
 maply <- function(.data, .fun = NULL, ..., .expand = TRUE, .progress = "none",
-                  .drop = TRUE, .parallel = FALSE) {
+                  .drop = TRUE, .parallel = FALSE, .paropts = NULL) {
   if (is.matrix(.data) & !is.list(.data)) .data <- .matrix_to_df(.data)
 
   f <- splat(.fun)
   aaply(.data = .data, .margins = 1, .fun = f, ...,
     .expand = .expand, .progress = .progress, .parallel = .parallel,
-    .drop = drop)
+    .paropts = .paropts, .drop = .drop)
 }
5  R/mdply.r
@@ -13,10 +13,11 @@
 #' mdply(cbind(mean = 1:5, sd = 1:5), rnorm, n = 5)
 #' mdply(cbind(mean = 1:5, sd = 1:5), as.data.frame(rnorm), n = 5)
 mdply <- function(.data, .fun = NULL, ..., .expand = TRUE, .progress = "none",
-                  .parallel = FALSE) {
+                  .parallel = FALSE, .paropts = NULL) {
   if (is.matrix(.data) & !is.list(.data)) .data <- .matrix_to_df(.data)
 
   f <- splat(.fun)
   adply(.data = .data, .margins = 1, .fun = f, ...,
-    .expand = .expand, .progress = .progress, .parallel = .parallel)
+    .expand = .expand, .progress = .progress, .parallel = .parallel,
+    .paropts = .paropts)
 }
5  R/mlply.r
@@ -15,10 +15,11 @@
 #' mlply(cbind(1:4, length = 4:1), seq)
 #' mlply(cbind(1:4, by = 4:1), seq, to = 20)
 mlply <- function(.data, .fun = NULL, ..., .expand = TRUE, .progress = "none",
-                  .parallel = FALSE) {
+                  .parallel = FALSE, .paropts = NULL) {
   if (is.matrix(.data) & !is.list(.data)) .data <- .matrix_to_df(.data)
 
   f <- splat(.fun)
   alply(.data = .data, .margins = 1, .fun = f, ...,
-    .expand = .expand, .progress = .progress, .parallel = .parallel)
+    .expand = .expand, .progress = .progress, .parallel = .parallel,
+    .paropts = .paropts)
 }
6  man-roxygen/ply.r
@@ -2,6 +2,12 @@
 #' @param ... other arguments passed on to \code{.fun}
 #' @param .progress name of the progress bar to use, see
 #'   \code{\link{create_progress_bar}}
+#' @param .paropts a list of additional options passed into
+#'   the \code{\link[foreach]{foreach}} function when parallel computation
+#'   is enabled.  This is important if (for example) your code relies on
+#'   external data or packages: use the \code{.export} and \code{.packages}
+#'   arguments to supply them so that all cluster nodes have the correct
+#'   environment set up for computing.
 #' @param .parallel if \code{TRUE}, apply function in parallel, using parallel
 #'   backend provided by foreach
 #' @keywords manip
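
A hedged illustration of the `.export`/`.packages` advice above (the helper and object names are hypothetical, a registered parallel backend is assumed, and whether the export is strictly required depends on the backend: forked workers inherit the session, PSOCK clusters do not):

    library(plyr)
    library(doParallel)
    registerDoParallel(cores = 2)

    offset <- 10                            # object the worker processes must see
    scale_max <- function(x) x / max(x)     # helper defined in the calling session

    res <- ldply(list(a = 1:3, b = 4:6),
      function(x) scale_max(x) + offset,
      .parallel = TRUE,
      .paropts = list(.export = c("offset", "scale_max")))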
11  man/a_ply.Rd
@@ -3,7 +3,8 @@
 \title{Split array, apply function, and discard results.}
 \usage{
   a_ply(.data, .margins, .fun = NULL, ..., .expand = TRUE,
-    .progress = "none", .print = FALSE, .parallel = FALSE)
+    .progress = "none", .print = FALSE, .parallel = FALSE,
+    .paropts = NULL)
 }
 \arguments{
   \item{.fun}{function to apply to each piece}
@@ -13,6 +14,14 @@
   \item{.progress}{name of the progress bar to use, see
   \code{\link{create_progress_bar}}}
 
+  \item{.paropts}{a list of additional options passed into
+  the \code{\link[foreach]{foreach}} function when parallel
+  computation is enabled.  This is important if (for
+  example) your code relies on external data or packages:
+  use the \code{.export} and \code{.packages} arguments to
+  supply them so that all cluster nodes have the correct
+  environment set up for computing.}
+
   \item{.parallel}{if \code{TRUE}, apply function in
   parallel, using parallel backend provided by foreach}
 
11  man/aaply.Rd
@@ -3,7 +3,8 @@
 \title{Split array, apply function, and return results in an array.}
 \usage{
   aaply(.data, .margins, .fun = NULL, ..., .expand = TRUE,
-    .progress = "none", .drop = TRUE, .parallel = FALSE)
+    .progress = "none", .drop = TRUE, .parallel = FALSE,
+    .paropts = NULL)
 }
 \arguments{
   \item{.fun}{function to apply to each piece}
@@ -13,6 +14,14 @@
   \item{.progress}{name of the progress bar to use, see
   \code{\link{create_progress_bar}}}
 
+  \item{.paropts}{a list of additional options passed into
+  the \code{\link[foreach]{foreach}} function when parallel
+  computation is enabled.  This is important if (for
+  example) your code relies on external data or packages:
+  use the \code{.export} and \code{.packages} arguments to
+  supply them so that all cluster nodes have the correct
+  environment set up for computing.}
+
   \item{.parallel}{if \code{TRUE}, apply function in
   parallel, using parallel backend provided by foreach}
 
10  man/adply.Rd
@@ -3,7 +3,7 @@
 \title{Split array, apply function, and return results in a data frame.}
 \usage{
   adply(.data, .margins, .fun = NULL, ..., .expand = TRUE,
-    .progress = "none", .parallel = FALSE)
+    .progress = "none", .parallel = FALSE, .paropts = NULL)
 }
 \arguments{
   \item{.fun}{function to apply to each piece}
@@ -13,6 +13,14 @@
   \item{.progress}{name of the progress bar to use, see
   \code{\link{create_progress_bar}}}
 
+  \item{.paropts}{a list of additional options passed into
+  the \code{\link[foreach]{foreach}} function when parallel
+  computation is enabled.  This is important if (for
+  example) your code relies on external data or packages:
+  use the \code{.export} and \code{.packages} arguments to
+  supply them so that all cluster nodes have the correct
+  environment set up for computing.}
+
   \item{.parallel}{if \code{TRUE}, apply function in
   parallel, using parallel backend provided by foreach}
 
10  man/alply.Rd
@@ -3,7 +3,7 @@
 \title{Split array, apply function, and return results in a list.}
 \usage{
   alply(.data, .margins, .fun = NULL, ..., .expand = TRUE,
-    .progress = "none", .parallel = FALSE)
+    .progress = "none", .parallel = FALSE, .paropts = NULL)
 }
 \arguments{
   \item{.fun}{function to apply to each piece}
@@ -13,6 +13,14 @@
   \item{.progress}{name of the progress bar to use, see
   \code{\link{create_progress_bar}}}
 
+  \item{.paropts}{a list of additional options passed into
+  the \code{\link[foreach]{foreach}} function when parallel
+  computation is enabled.  This is important if (for
+  example) your code relies on external data or packages:
+  use the \code{.export} and \code{.packages} arguments to
+  supply them so that all cluster nodes have the correct
+  environment set up for computing.}
+
   \item{.parallel}{if \code{TRUE}, apply function in
   parallel, using parallel backend provided by foreach}
 
10  man/d_ply.Rd
@@ -4,7 +4,7 @@
 \usage{
   d_ply(.data, .variables, .fun = NULL, ...,
     .progress = "none", .drop = TRUE, .print = FALSE,
-    .parallel = FALSE)
+    .parallel = FALSE, .paropts = NULL)
 }
 \arguments{
   \item{.fun}{function to apply to each piece}
@@ -14,6 +14,14 @@
   \item{.progress}{name of the progress bar to use, see
   \code{\link{create_progress_bar}}}
 
+  \item{.paropts}{a list of additional options passed into
+  the \code{\link[foreach]{foreach}} function when parallel
+  computation is enabled.  This is important if (for
+  example) your code relies on external data or packages:
+  use the \code{.export} and \code{.packages} arguments to
+  supply them so that all cluster nodes have the correct
+  environment set up for computing.}
+
   \item{.parallel}{if \code{TRUE}, apply function in
   parallel, using parallel backend provided by foreach}
 
10  man/daply.Rd
@@ -4,7 +4,7 @@
 \usage{
   daply(.data, .variables, .fun = NULL, ...,
     .progress = "none", .drop_i = TRUE, .drop_o = TRUE,
-    .parallel = FALSE)
+    .parallel = FALSE, .paropts = NULL)
 }
 \arguments{
   \item{.fun}{function to apply to each piece}
@@ -14,6 +14,14 @@
   \item{.progress}{name of the progress bar to use, see
   \code{\link{create_progress_bar}}}
 
+  \item{.paropts}{a list of additional options passed into
+  the \code{\link[foreach]{foreach}} function when parallel
+  computation is enabled.  This is important if (for
+  example) your code relies on external data or packages:
+  use the \code{.export} and \code{.packages} arguments to
+  supply them so that all cluster nodes have the correct
+  environment set up for computing.}
+
   \item{.parallel}{if \code{TRUE}, apply function in
   parallel, using parallel backend provided by foreach}
 
11  man/ddply.Rd
@@ -3,7 +3,8 @@
 \title{Split data frame, apply function, and return results in a data frame.}
 \usage{
   ddply(.data, .variables, .fun = NULL, ...,
-    .progress = "none", .drop = TRUE, .parallel = FALSE)
+    .progress = "none", .drop = TRUE, .parallel = FALSE,
+    .paropts = NULL)
 }
 \arguments{
   \item{.fun}{function to apply to each piece}
@@ -13,6 +14,14 @@
   \item{.progress}{name of the progress bar to use, see
   \code{\link{create_progress_bar}}}
 
+  \item{.paropts}{a list of additional options passed into
+  the \code{\link[foreach]{foreach}} function when parallel
+  computation is enabled.  This is important if (for
+  example) your code relies on external data or packages:
+  use the \code{.export} and \code{.packages} arguments to
+  supply them so that all cluster nodes have the correct
+  environment set up for computing.}
+
   \item{.parallel}{if \code{TRUE}, apply function in
   parallel, using parallel backend provided by foreach}
 
11  man/dlply.Rd
@@ -3,7 +3,8 @@
 \title{Split data frame, apply function, and return results in a list.}
 \usage{
   dlply(.data, .variables, .fun = NULL, ...,
-    .progress = "none", .drop = TRUE, .parallel = FALSE)
+    .progress = "none", .drop = TRUE, .parallel = FALSE,
+    .paropts = NULL)
 }
 \arguments{
   \item{.fun}{function to apply to each piece}
@@ -13,6 +14,14 @@
   \item{.progress}{name of the progress bar to use, see
   \code{\link{create_progress_bar}}}
 
+  \item{.paropts}{a list of additional options passed into
+  the \code{\link[foreach]{foreach}} function when parallel
+  computation is enabled.  This is important if (for
+  example) your code relies on external data or packages:
+  use the \code{.export} and \code{.packages} arguments to
+  supply them so that all cluster nodes have the correct
+  environment set up for computing.}
+
   \item{.parallel}{if \code{TRUE}, apply function in
   parallel, using parallel backend provided by foreach}
 
10  man/l_ply.Rd
@@ -3,7 +3,7 @@
 \title{Split list, apply function, and discard results.}
 \usage{
   l_ply(.data, .fun = NULL, ..., .progress = "none",
-    .print = FALSE, .parallel = FALSE)
+    .print = FALSE, .parallel = FALSE, .paropts = NULL)
 }
 \arguments{
   \item{.fun}{function to apply to each piece}
@@ -13,6 +13,14 @@
   \item{.progress}{name of the progress bar to use, see
   \code{\link{create_progress_bar}}}
 
+  \item{.paropts}{a list of additional options passed into
+  the \code{\link[foreach]{foreach}} function when parallel
+  computation is enabled.  This is important if (for
+  example) your code relies on external data or packages:
+  use the \code{.export} and \code{.packages} arguments to
+  supply them so that all cluster nodes have the correct
+  environment set up for computing.}
+
   \item{.parallel}{if \code{TRUE}, apply function in
   parallel, using parallel backend provided by foreach}
 
10  man/laply.Rd
@@ -3,7 +3,7 @@
 \title{Split list, apply function, and return results in an array.}
 \usage{
   laply(.data, .fun = NULL, ..., .progress = "none",
-    .drop = TRUE, .parallel = FALSE)
+    .drop = TRUE, .parallel = FALSE, .paropts = NULL)
 }
 \arguments{
   \item{.fun}{function to apply to each piece}
@@ -13,6 +13,14 @@
   \item{.progress}{name of the progress bar to use, see
   \code{\link{create_progress_bar}}}
 
+  \item{.paropts}{a list of additional options passed into
+  the \code{\link[foreach]{foreach}} function when parallel
+  computation is enabled.  This is important if (for
+  example) your code relies on external data or packages:
+  use the \code{.export} and \code{.packages} arguments to
+  supply them so that all cluster nodes have the correct
+  environment set up for computing.}
+
   \item{.parallel}{if \code{TRUE}, apply function in
   parallel, using parallel backend provided by foreach}
 
10  man/ldply.Rd
@@ -3,7 +3,7 @@
 \title{Split list, apply function, and return results in a data frame.}
 \usage{
   ldply(.data, .fun = NULL, ..., .progress = "none",
-    .parallel = FALSE)
+    .parallel = FALSE, .paropts = NULL)
 }
 \arguments{
   \item{.fun}{function to apply to each piece}
@@ -13,6 +13,14 @@
   \item{.progress}{name of the progress bar to use, see
   \code{\link{create_progress_bar}}}
 
+  \item{.paropts}{a list of additional options passed into
+  the \code{\link[foreach]{foreach}} function when parallel
+  computation is enabled.  This is important if (for
+  example) your code relies on external data or packages:
+  use the \code{.export} and \code{.packages} arguments to
+  supply them so that all cluster nodes have the correct
+  environment set up for computing.}
+
   \item{.parallel}{if \code{TRUE}, apply function in
   parallel, using parallel backend provided by foreach}
 
16  man/llply.Rd
@@ -13,6 +13,14 @@
   \item{.progress}{name of the progress bar to use, see
   \code{\link{create_progress_bar}}}
 
+  \item{.paropts}{a list of additional options passed into
+  the \code{\link[foreach]{foreach}} function when parallel
+  computation is enabled.  This is important if (for
+  example) your code relies on external data or packages:
+  use the \code{.export} and \code{.packages} arguments to
+  supply them so that all cluster nodes have the correct
+  environment set up for computing.}
+
   \item{.parallel}{if \code{TRUE}, apply function in
   parallel, using parallel backend provided by foreach}
 
@@ -21,14 +29,6 @@
   \item{.inform}{produce informative error messages?  This
   is turned off by by default because it substantially
   slows processing speed, but is very useful for debugging}
-
-  \item{.paropts}{a list of additional options passed into
-  the \code{\link[foreach]{foreach}} function when parallel
-  computation is enabled.  This is important if (for
-  example) your code relies on external data or packages:
-  use the \code{.export} and \code{.packages} arguments to
-  supply them so that all cluster nodes have the correct
-  environment set up for computing.}
 }
 \value{
   list of results
11  man/m_ply.Rd
@@ -3,7 +3,8 @@
 \title{Call function with arguments in array or data frame, discarding results.}
 \usage{
   m_ply(.data, .fun = NULL, ..., .expand = TRUE,
-    .progress = "none", .print = FALSE, .parallel = FALSE)
+    .progress = "none", .print = FALSE, .parallel = FALSE,
+    .paropts = NULL)
 }
 \arguments{
   \item{.fun}{function to apply to each piece}
@@ -13,6 +14,14 @@
   \item{.progress}{name of the progress bar to use, see
   \code{\link{create_progress_bar}}}
 
+  \item{.paropts}{a list of additional options passed into
+  the \code{\link[foreach]{foreach}} function when parallel
+  computation is enabled.  This is important if (for
+  example) your code relies on external data or packages:
+  use the \code{.export} and \code{.packages} arguments to
+  supply them so that all cluster nodes have the correct
+  environment set up for computing.}
+
   \item{.parallel}{if \code{TRUE}, apply function in
   parallel, using parallel backend provided by foreach}
 
11  man/maply.Rd
@@ -3,7 +3,8 @@
 \title{Call function with arguments in array or data frame, returning an array.}
 \usage{
   maply(.data, .fun = NULL, ..., .expand = TRUE,
-    .progress = "none", .drop = TRUE, .parallel = FALSE)
+    .progress = "none", .drop = TRUE, .parallel = FALSE,
+    .paropts = NULL)
 }
 \arguments{
   \item{.fun}{function to apply to each piece}
@@ -13,6 +14,14 @@
   \item{.progress}{name of the progress bar to use, see
   \code{\link{create_progress_bar}}}
 
+  \item{.paropts}{a list of additional options passed into
+  the \code{\link[foreach]{foreach}} function when parallel
+  computation is enabled.  This is important if (for
+  example) your code relies on external data or packages:
+  use the \code{.export} and \code{.packages} arguments to
+  supply them so that all cluster nodes have the correct
+  environment set up for computing.}
+
   \item{.parallel}{if \code{TRUE}, apply function in
   parallel, using parallel backend provided by foreach}
 
10  man/mdply.Rd
@@ -3,7 +3,7 @@
 \title{Call function with arguments in array or data frame, returning a data frame.}
 \usage{
   mdply(.data, .fun = NULL, ..., .expand = TRUE,
-    .progress = "none", .parallel = FALSE)
+    .progress = "none", .parallel = FALSE, .paropts = NULL)
 }
 \arguments{
   \item{.fun}{function to apply to each piece}
@@ -13,6 +13,14 @@
   \item{.progress}{name of the progress bar to use, see
   \code{\link{create_progress_bar}}}
 
+  \item{.paropts}{a list of additional options passed into
+  the \code{\link[foreach]{foreach}} function when parallel
+  computation is enabled.  This is important if (for
+  example) your code relies on external data or packages:
+  use the \code{.export} and \code{.packages} arguments to
+  supply them so that all cluster nodes have the correct
+  environment set up for computing.}
+
   \item{.parallel}{if \code{TRUE}, apply function in
   parallel, using parallel backend provided by foreach}
 
10  man/mlply.Rd
@@ -3,7 +3,7 @@
 \title{Call function with arguments in array or data frame, returning a list.}
 \usage{
   mlply(.data, .fun = NULL, ..., .expand = TRUE,
-    .progress = "none", .parallel = FALSE)
+    .progress = "none", .parallel = FALSE, .paropts = NULL)
 }
 \arguments{
   \item{.fun}{function to apply to each piece}
@@ -13,6 +13,14 @@
   \item{.progress}{name of the progress bar to use, see
   \code{\link{create_progress_bar}}}
 
+  \item{.paropts}{a list of additional options passed into
+  the \code{\link[foreach]{foreach}} function when parallel
+  computation is enabled.  This is important if (for
+  example) your code relies on external data or packages:
+  use the \code{.export} and \code{.packages} arguments to
+  supply them so that all cluster nodes have the correct
+  environment set up for computing.}
+
   \item{.parallel}{if \code{TRUE}, apply function in
   parallel, using parallel backend provided by foreach}
 
