
Commit

version 1.0.5
hadley authored and cran-robot committed Mar 5, 2021
1 parent 4c6baa3 commit 42d55ab
Showing 169 changed files with 3,145 additions and 2,344 deletions.
13 changes: 7 additions & 6 deletions DESCRIPTION
@@ -1,7 +1,7 @@
Type: Package
Package: dplyr
Title: A Grammar of Data Manipulation
Version: 1.0.4
Version: 1.0.5
Authors@R:
c(person(given = "Hadley",
family = "Wickham",
@@ -27,23 +27,24 @@ License: MIT + file LICENSE
URL: https://dplyr.tidyverse.org, https://github.com/tidyverse/dplyr
BugReports: https://github.com/tidyverse/dplyr/issues
Depends: R (>= 3.3.0)
Imports: ellipsis, generics, glue (>= 1.3.2), lifecycle (>= 0.2.0),
magrittr (>= 1.5), methods, R6, rlang (>= 0.4.9), tibble (>=
Imports: ellipsis, generics, glue (>= 1.3.2), lifecycle (>= 1.0.0),
magrittr (>= 1.5), methods, R6, rlang (>= 0.4.10), tibble (>=
2.1.3), tidyselect (>= 1.1.0), utils, vctrs (>= 0.3.5)
Suggests: bench, broom, callr, covr, DBI, dbplyr (>= 1.4.3), knitr,
Lahman, lobstr, microbenchmark, nycflights13, purrr, rmarkdown,
RMySQL, RPostgreSQL, RSQLite, testthat (>= 2.1.0), withr
RMySQL, RPostgreSQL, RSQLite, testthat (>= 3.0.2), tidyr, withr
VignetteBuilder: knitr
Encoding: UTF-8
LazyData: true
RoxygenNote: 7.1.1
Config/testthat/edition: 3
NeedsCompilation: yes
Packaged: 2021-02-01 10:59:21 UTC; romainfrancois
Packaged: 2021-02-19 10:54:12 UTC; romainfrancois
Author: Hadley Wickham [aut, cre] (<https://orcid.org/0000-0003-4757-117X>),
Romain François [aut] (<https://orcid.org/0000-0002-2444-4226>),
Lionel Henry [aut],
Kirill Müller [aut] (<https://orcid.org/0000-0002-1416-3412>),
RStudio [cph, fnd]
Maintainer: Hadley Wickham <hadley@rstudio.com>
Repository: CRAN
Date/Publication: 2021-02-02 17:10:03 UTC
Date/Publication: 2021-03-05 11:00:02 UTC
252 changes: 126 additions & 126 deletions MD5

Large diffs are not rendered by default.

15 changes: 15 additions & 0 deletions NEWS.md
@@ -1,3 +1,18 @@
# dplyr 1.0.5

* Fixed edge case of `slice_sample()` when `weight_by=` is used and there
  are 0 rows (#5729).

* `across()` can again use columns in functions defined inline (#5734).

* Using testthat 3rd edition.

* Fixed bugs introduced in `across()` in the previous version (#5765).

* `group_by()` keeps attributes unrelated to the grouping (#5760).

* The `.cols=` argument of `if_any()` and `if_all()` defaults to `everything()`.

# dplyr 1.0.4

* Improved performance for `across()`. This makes `summarise(across())` and
34 changes: 10 additions & 24 deletions R/across.R
@@ -6,7 +6,7 @@
#' functions like [summarise()] and [mutate()]. See `vignette("colwise")` for
#' more details.
#'
#' `if_any()` and `if_all()` are used with to apply the same
#' `if_any()` and `if_all()` apply the same
#' predicate function to a selection of columns and combine the
#' results into a single logical vector.
#'
@@ -147,7 +147,7 @@ across <- function(.cols = everything(), .fns = NULL, ..., .names = NULL) {

#' @rdname across
#' @export
if_any <- function(.cols, .fns = NULL, ..., .names = NULL) {
if_any <- function(.cols = everything(), .fns = NULL, ..., .names = NULL) {
df <- across({{ .cols }}, .fns = .fns, ..., .names = .names)
n <- nrow(df)
df <- vec_cast_common(!!!df, .to = logical())
@@ -156,7 +156,7 @@ if_any <- function(.cols, .fns = NULL, ..., .names = NULL) {

#' @rdname across
#' @export
if_all <- function(.cols, .fns = NULL, ..., .names = NULL) {
if_all <- function(.cols = everything(), .fns = NULL, ..., .names = NULL) {
df <- across({{ .cols }}, .fns = .fns, ..., .names = .names)
n <- nrow(df)
df <- vec_cast_common(!!!df, .to = logical())
@@ -213,7 +213,7 @@ across_setup <- function(cols, fns, names, key, .caller_env) {
mask <- peek_mask("across()")
value <- mask$across_cache_get(key)
if (is.null(value)) {
value <- across_setup_impl({{cols}},
value <- across_setup_impl({{ cols }},
fns = fns, names = names, .caller_env = .caller_env, mask = mask,
.top_level = FALSE
)
@@ -233,11 +233,10 @@ across_setup_impl <- function(cols, fns, names, .caller_env, mask = peek_mask("a
# across_setup() is only ever called on the first group anyway
# but perhaps it is time to review how across_cols() work
mask$set_current_group(1L)
} else {
# The real `across()` is evaluated in a data mask so we need to remove the
# mask layer from the quosure environment (#5460)
cols <- quo_set_env(cols, data_mask_top(quo_get_env(cols), recursive = FALSE, inherit = TRUE))
}
# `across()` is evaluated in a data mask so we need to remove the
# mask layer from the quosure environment (#5460)
cols <- quo_set_env(cols, data_mask_top(quo_get_env(cols), recursive = FALSE, inherit = TRUE))

vars <- tidyselect::eval_select(cols, data = mask$across_cols())
vars <- names(vars)
@@ -266,21 +265,7 @@ across_setup_impl <- function(cols, fns, names, .caller_env, mask = peek_mask("a
))
}

expr_protect <- function(x) {
call2(quote, x)
}

fns <- map(fns, function(fn) {
if (is_formula(fn) && .top_level) {
f_rhs(fn) <- call2(
quote(rlang::eval_tidy),
expr_protect(f_rhs(fn)),
data = mask$get_rlang_mask()
)
}
fn <- as_function(fn)
fn
})
fns <- map(fns, as_function)

# make sure fns has names, use number to replace unnamed
if (is.null(names(fns))) {
@@ -429,7 +414,8 @@ expand_quosure <- function(quo) {
# call top_across() instead of across()
quo_env <- quo_get_env(quo)
quo <- new_quosure(node_poke_car(quo_get_expr(quo), top_across), quo_env)
expressions <- eval_tidy(quo)
mask <- peek_mask()
expressions <- eval_tidy(quo, mask$get_rlang_mask(), mask$get_caller_env())
names_expressions <- names(expressions)

# process the results of top_across()
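
A usage sketch of the new `.cols = everything()` default in `if_any()` and `if_all()` noted in the NEWS; the tibble below is invented for illustration and is not part of the diff:

library(dplyr)

df <- tibble(x = c(1, 5, 10), y = c(2, 0, 12))

# The column selection can now be omitted and falls back to everything()
filter(df, if_all(.fns = ~ .x > 1))

# Equivalent explicit form, which was required before this change
filter(df, if_all(everything(), ~ .x > 1))
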
4 changes: 2 additions & 2 deletions R/conditions.R
@@ -6,10 +6,10 @@ arg_name <- function(quos, index) {
name
}

cnd_bullet_cur_group_label <- function() {
cnd_bullet_cur_group_label <- function(what = "error") {
label <- cur_group_label()
if (label != "") {
glue("The error occurred in {label}.")
glue("The {what} occurred in {label}.")
}
}

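
A tiny sketch of the message the parameterised helper now builds; `label` below stands in for whatever `cur_group_label()` would return:

library(glue)

label <- "group 2: cyl = 6"   # stand-in for cur_group_label()
glue("The {what} occurred in {label}.", what = "warning")
#> The warning occurred in group 2: cyl = 6.
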
6 changes: 5 additions & 1 deletion R/data-mask.R
@@ -103,7 +103,7 @@ DataMask <- R6Class("DataMask",
},

set_current_group = function(group) {
parent.env(private$chops)$.current_group <- group
parent.env(private$chops)$.current_group[] <- group
},

full_data = function() {
@@ -192,6 +192,10 @@ DataMask <- R6Class("DataMask",

get_rlang_mask = function() {
private$mask
},

get_caller_env = function() {
private$caller
}

),
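
The diff does not spell out the rationale for switching `set_current_group()` from `<-` to `[<-`; as a general R note, subassignment with `[ ]` replaces the contents of the existing vector (keeping its length and attributes) rather than rebinding the name to a new object. A base-R sketch with an invented class:

x <- structure(1L, class = "counter")

x[] <- 2L      # replaces the contents of the existing object
class(x)       # still "counter"

x <- 3L        # rebinds the name to a brand-new plain integer
class(x)       # "integer"
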
2 changes: 1 addition & 1 deletion R/deprec-do.r
@@ -138,7 +138,7 @@ do.data.frame <- function(.data, ...) {
} else {
out <- map(args, function(arg) list(eval_tidy(arg, mask)))
names(out) <- names(args)
out <- tibble::as_tibble(out, validate = FALSE)
out <- tibble::as_tibble(out, .name_repair = "minimal")
}

out
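
The deprecated `validate = FALSE` argument is swapped for the current tibble interface; a minimal sketch of the replacement call, with an invented list:

library(tibble)

out <- list(result = list(head(mtcars, 2)))
as_tibble(out, .name_repair = "minimal")   # the modern spelling of the old validate = FALSE
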
2 changes: 1 addition & 1 deletion R/deprec-lazyeval.R
@@ -298,7 +298,7 @@ select_.grouped_df <- function(.data, ..., .dots = list()) {
select_vars_ <- function(vars, args, include = chr(), exclude = chr()) {
lifecycle::deprecate_warn("0.7.0", "select_vars_()", "tidyselect::vars_select()")
args <- compat_lazy_dots(args, caller_env())
tidyselect::vars_select(vars, !!!args, include = include, exclude = exclude)
tidyselect::vars_select(vars, !!!args, .include = include, .exclude = exclude)
}

#' @export
5 changes: 3 additions & 2 deletions R/group-by.r
@@ -149,7 +149,8 @@ group_by_prepare <- function(.data, ..., .add = FALSE, .dots = deprecated(), add
}

# If any calls, use mutate to add new columns, then group by those
computed_columns <- add_computed_columns(ungroup(.data), new_groups, "group_by")
computed_columns <- add_computed_columns(.data, new_groups, "group_by")

out <- computed_columns$data
group_names <- computed_columns$added_names

@@ -180,7 +181,7 @@ add_computed_columns <- function(.data, vars, .fn = "group_by") {
# TODO: use less of a hack
if (inherits(.data, "data.frame")) {
cols <- withCallingHandlers(
mutate_cols(.data, !!!vars),
mutate_cols(ungroup(.data), !!!vars),
error = function(e) {
abort(c(
glue("Problem adding computed columns in `{.fn}()`."),
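
`group_by_prepare()` now hands the still-grouped data to `add_computed_columns()` and only ungroups inside the `mutate_cols()` call; the NEWS above notes that `group_by()` keeps attributes unrelated to the grouping (#5760). A user-level sketch, with an invented "source" attribute:

library(dplyr)

df <- tibble(x = 1:4)
attr(df, "source") <- "sensor A"      # an attribute unrelated to grouping

gdf <- group_by(df, big_x = x > 2)    # grouping by a computed column
attr(gdf, "source")                   # kept as of 1.0.5
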
2 changes: 1 addition & 1 deletion R/group_map.R
@@ -34,7 +34,7 @@ as_group_map_function <- function(.f) {
#' @family grouping functions
#'
#' @param .data A grouped tibble
#' @param .f A function or formula to apply to each group. It must return a data frame.
#' @param .f A function or formula to apply to each group.
#'
#' If a __function__, it is used as is. It should have at least 2 formal arguments.
#'
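
For reference, a short sketch of the two ways of supplying `.f` described in the documentation above, using the built-in `mtcars`:

library(dplyr)

# As a formula: .x is the group's data, .y its key
mtcars %>% group_by(cyl) %>% group_map(~ head(.x, 1L))

# As a function with at least two formal arguments; it need not return a data frame
mtcars %>% group_by(cyl) %>% group_map(function(data, key) nrow(data))
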
3 changes: 2 additions & 1 deletion R/grouped-df.r
@@ -91,7 +91,8 @@ compute_groups <- function(data, vars, drop = FALSE) {
groups <- tibble(!!!new_keys, ".rows" := new_rows)
}

structure(groups, .drop = drop)
attr(groups, ".drop") <- drop
groups
}

count_regroups <- function(code) {
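
Replacing `structure()` with an explicit `attr<-` assignment is behaviour-preserving here: the `.drop` flag still lands on the groups tibble, where `group_by_drop_default()` reads it back. A small sketch:

library(dplyr)

df <- tibble(f = factor("a", levels = c("a", "b")), x = 1)

gdf <- group_by(df, f, .drop = FALSE)
attr(group_data(gdf), ".drop")    # FALSE
group_by_drop_default(gdf)        # FALSE
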
31 changes: 25 additions & 6 deletions R/mutate.R
@@ -236,7 +236,10 @@ mutate_cols <- function(.data, ...) {
for (i in seq_along(dots)) {
mask$across_cache_reset()

# get results from all the quosures that are expanded from ..i
# then ingest them after
quosures <- expand_quosure(dots[[i]])
quosures_results <- vector(mode = "list", length = length(quosures))

for (k in seq_along(quosures)) {
quo <- quosures[[k]]
@@ -283,11 +286,6 @@ mutate_cols <- function(.data, ...) {
}

if (is.null(chunks)) {
if (quo_data$is_named) {
name <- quo_data$name_given
new_columns[[name]] <- zap()
mask$remove(name)
}
next
}

@@ -305,6 +303,27 @@
}
}

quosures_results[[k]] <- list(result = result, chunks = chunks)
}


for (k in seq_along(quosures)) {
quo <- quosures[[k]]
quo_data <- attr(quo, "dplyr:::data")

quo_result <- quosures_results[[k]]
if (is.null(quo_result)) {
if (quo_data$is_named) {
name <- quo_data$name_given
new_columns[[name]] <- zap()
mask$remove(name)
}
next
}

result <- quo_result$result
chunks <- quo_result$chunks

if (!quo_data$is_named && is.data.frame(result)) {
new_columns[names(result)] <- result
mask$add_many(result, chunks)
@@ -393,7 +412,7 @@ mutate_cols <- function(.data, ...) {
cnd_bullet_header(),
i = conditionMessage(w),
i = cnd_bullet_input_info(),
i = cnd_bullet_cur_group_label()
i = cnd_bullet_cur_group_label(what = "warning")
))

# Cancel `w`
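
The loop is reorganised into two passes: every quosure expanded from `..i` is evaluated first, and the results are ingested into the mask in a second loop. The NEWS above lists the related user-facing `across()` fixes (#5734, #5765); a sketch of the inline-function case, with invented data:

library(dplyr)

df <- tibble(x = c(1, 2), z = c(100, 200), y = c(10, 20))

# The anonymous function passed to across() refers to another column, y
mutate(df, across(c(x, z), function(col) col / y))
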
10 changes: 6 additions & 4 deletions R/slice.R
@@ -239,7 +239,6 @@ slice_sample.data.frame <- function(.data, ..., n, prop, weight_by = NULL, repla
n = function(x, n) sample_int(n, size$n, replace = replace, wt = x),
prop = function(x, n) sample_int(n, size$prop * n, replace = replace, wt = x),
)

slice(.data, idx({{ weight_by }}, dplyr::n()))
}

@@ -327,10 +326,13 @@ check_slice_size <- function(n, prop, .slice_fn = "check_slice_size") {
}

sample_int <- function(n, size, replace = FALSE, wt = NULL) {
if (replace) {
sample.int(n, size, prob = wt, replace = TRUE)
if (!replace) {
size <- min(size, n)
}
if (size == 0L) {
integer(0)
} else {
sample.int(n, min(size, n), prob = wt)
sample.int(n, size, prob = wt, replace = replace)
}
}

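
`sample_int()` now clamps `size` to `n` only when sampling without replacement and returns `integer(0)` outright for a zero-size draw, which covers the empty-input edge case noted in the NEWS (#5729). A user-level sketch:

library(dplyr)

df <- tibble(x = numeric(0), w = numeric(0))

# Weighted sampling from zero rows now returns the 0-row data frame
slice_sample(df, n = 3, weight_by = w)
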
15 changes: 15 additions & 0 deletions R/summarise.R
@@ -225,6 +225,7 @@ summarise_cols <- function(.data, ...) {
mask$across_cache_reset()

quosures <- expand_quosure(dots[[i]])
quosures_results <- vector(mode = "list", length = length(quosures))

# with the previous part above, for each element of ... we can
# have either one or several quosures, each of them handled here:
@@ -246,6 +247,20 @@
)
chunks_k <- vec_cast_common(!!!chunks_k, .to = types_k)

quosures_results[[k]] <- list(chunks = chunks_k, types = types_k)
}

for (k in seq_along(quosures)) {
quo <- quosures[[k]]
quo_data <- attr(quo, "dplyr:::data")

quo_result <- quosures_results[[k]]
if (is.null(quo_result)) {
next
}
types_k <- quo_result$types
chunks_k <- quo_result$chunks

if (!quo_data$is_named && is.data.frame(types_k)) {
chunks_extracted <- .Call(dplyr_extract_chunks, chunks_k, types_k)

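
`summarise_cols()` gets the same two-pass treatment as `mutate_cols()` above: results for all expanded quosures are collected in `quosures_results` first, then ingested. The user-facing path this serves is `summarise()` with `across()`; for reference, using the built-in `mtcars`:

library(dplyr)

# An unnamed across() returns a data frame whose columns are spliced
# into the summarise() result, one row per group
mtcars %>%
  group_by(cyl) %>%
  summarise(across(c(mpg, disp), mean), n = n())
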
Binary file modified build/dplyr.pdf
