Auto merge of rust-lang#73235 - Dylan-DPC:rollup-zp8oxhg, r=Dylan-DPC
Rollup of 11 pull requests

Successful merges:

 - rust-lang#72380 (Fix `is_const_context`, update `check_for_cast`)
 - rust-lang#72941 (Ensure stack when building MIR for matches)
 - rust-lang#72976 (Clean up E0642 explanation)
 - rust-lang#73080 (doc/rustdoc: Fix incorrect external_doc feature flag)
 - rust-lang#73155 (save_analysis: better handle paths and functions signature)
 - rust-lang#73164 (Add new E0762 error code)
 - rust-lang#73172 (Fix more clippy warnings)
 - rust-lang#73181 (Automatically prioritize unsoundness issues)
 - rust-lang#73183 (Support proc macros in intra doc link resolution)
 - rust-lang#73208 (Fix doctest template)
 - rust-lang#73219 (x.py: with --json-output, forward cargo's JSON)

Failed merges:

r? @ghost
bors committed Jun 11, 2020
2 parents 3ddf480 + ba0a8d2 commit 50c0192
Showing 44 changed files with 5,807 additions and 415 deletions.
8 changes: 7 additions & 1 deletion src/bootstrap/compile.rs
@@ -983,7 +983,13 @@ pub fn stream_cargo(
for line in stdout.lines() {
let line = t!(line);
match serde_json::from_str::<CargoMessage<'_>>(&line) {
Ok(msg) => cb(msg),
Ok(msg) => {
if builder.config.json_output {
// Forward JSON to stdout.
println!("{}", line);
}
cb(msg)
}
// If this was informational, just print it out and continue
Err(_) => println!("{}", line),
}
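The hunk above makes bootstrap echo Cargo's JSON messages verbatim when `x.py` is invoked with `--json-output`. As a rough sketch (not part of this commit) of how a downstream tool might consume that stream — it assumes `serde_json` as a dependency and treats any line that is not valid JSON as ordinary bootstrap log text:

```rust
use std::io::{self, BufRead};

fn main() {
    // Hypothetical consumer of `x.py build --json-output`: every line that parses
    // as JSON is a Cargo message; anything else is plain log output.
    for line in io::stdin().lock().lines().filter_map(Result::ok) {
        match serde_json::from_str::<serde_json::Value>(&line) {
            // Cargo messages carry a `reason` field, e.g. "compiler-message".
            Ok(msg) => println!("cargo message: reason = {}", msg["reason"]),
            Err(_) => eprintln!("log: {}", line),
        }
    }
}
```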
2 changes: 1 addition & 1 deletion src/doc/rustdoc/src/documentation-tests.md
@@ -416,7 +416,7 @@ without including it in your main documentation. For example, you could write th
`lib.rs` to test your README as part of your doctests:

```rust,ignore
#![feature(extern_doc)]
#![feature(external_doc)]
#[doc(include="../README.md")]
#[cfg(doctest)]
3 changes: 2 additions & 1 deletion src/libcore/num/mod.rs
@@ -3309,7 +3309,8 @@ Basic usage:
```
", $Feature, "assert_eq!(100", stringify!($SelfT), ".saturating_add(1), 101);
assert_eq!(200u8.saturating_add(127), 255);", $EndFeature, "
assert_eq!(", stringify!($SelfT), "::MAX.saturating_add(127), ", stringify!($SelfT), "::MAX);",
$EndFeature, "
```"),

#[stable(feature = "rust1", since = "1.0.0")]
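For reference, the behaviour the rewritten doc example documents, as a standalone snippet: saturating arithmetic clamps at the numeric bounds instead of wrapping or panicking.

```rust
fn main() {
    assert_eq!(100u8.saturating_add(1), 101);
    // Saturating at the upper bound, as in the updated doc example.
    assert_eq!(u8::MAX.saturating_add(127), u8::MAX);
    assert_eq!(i8::MIN.saturating_sub(1), i8::MIN);
}
```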
2 changes: 1 addition & 1 deletion src/librustc_ast/tokenstream.rs
@@ -392,7 +392,7 @@ impl TokenStream {
break;
}
}
token_trees = out.into_iter().map(|t| TokenTree::Token(t)).collect();
token_trees = out.into_iter().map(TokenTree::Token).collect();
if token_trees.len() != 1 {
debug!("break_tokens: broke {:?} to {:?}", tree, token_trees);
}
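This is the usual clippy `redundant_closure` cleanup: a tuple-struct constructor such as `TokenTree::Token` is itself a function, so it can be passed to `map` directly. A minimal illustration with a made-up wrapper type:

```rust
#[derive(Debug, PartialEq)]
struct Wrapper(u32);

fn main() {
    let xs = [1u32, 2, 3];
    // `|x| Wrapper(x)` and `Wrapper` describe the same mapping.
    let with_closure: Vec<Wrapper> = xs.iter().copied().map(|x| Wrapper(x)).collect();
    let with_ctor: Vec<Wrapper> = xs.iter().copied().map(Wrapper).collect();
    assert_eq!(with_closure, with_ctor);
}
```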
6 changes: 2 additions & 4 deletions src/librustc_ast_lowering/expr.rs
@@ -1237,10 +1237,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
) => {
assert!(!*late);
let out_op_sp = if input { op_sp2 } else { op_sp };
let msg = &format!(
"use `lateout` instead of \
`out` to avoid conflict"
);
let msg = "use `lateout` instead of \
`out` to avoid conflict";
err.span_help(out_op_sp, msg);
}
_ => {}
2 changes: 1 addition & 1 deletion src/librustc_builtin_macros/asm.rs
@@ -457,7 +457,7 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, sp: Span, args: AsmArgs) -> P<ast

let mut chars = arg.format.ty.chars();
let mut modifier = chars.next();
if !chars.next().is_none() {
if chars.next().is_some() {
let span = arg
.format
.ty_span
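Likewise, `!opt.is_none()` is simply `opt.is_some()`; a one-line check of the equivalence on an unrelated `Option`:

```rust
fn main() {
    let second: Option<char> = "xy".chars().nth(1);
    assert_eq!(!second.is_none(), second.is_some());
}
```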
2 changes: 1 addition & 1 deletion src/librustc_codegen_ssa/mir/constant.rs
@@ -63,7 +63,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
.tcx()
.destructure_const(ty::ParamEnv::reveal_all().and(&c))
.fields
.into_iter()
.iter()
.map(|field| {
if let Some(prim) = field.val.try_to_scalar() {
let layout = bx.layout_of(field_ty);
1 change: 1 addition & 0 deletions src/librustc_error_codes/error_codes.rs
@@ -440,6 +440,7 @@ E0754: include_str!("./error_codes/E0754.md"),
E0758: include_str!("./error_codes/E0758.md"),
E0760: include_str!("./error_codes/E0760.md"),
E0761: include_str!("./error_codes/E0761.md"),
E0762: include_str!("./error_codes/E0762.md"),
;
// E0006, // merged with E0005
// E0008, // cannot bind by-move into a pattern guard
2 changes: 1 addition & 1 deletion src/librustc_error_codes/error_codes/E0642.md
@@ -1,6 +1,6 @@
Trait methods currently cannot take patterns as arguments.

Example of erroneous code:
Erroneous code example:

```compile_fail,E0642
trait Foo {
13 changes: 13 additions & 0 deletions src/librustc_error_codes/error_codes/E0762.md
@@ -0,0 +1,13 @@
A character literal wasn't ended with a quote.

Erroneous code example:

```compile_fail,E0762
static C: char = '●; // error!
```

To fix this error, add the missing quote:

```
static C: char = '●'; // ok!
```
8 changes: 2 additions & 6 deletions src/librustc_errors/annotate_snippet_emitter_writer.rs
@@ -159,14 +159,10 @@ impl AnnotateSnippetEmitterWriter {
// FIXME(#59346): Not really sure when `fold` should be true or false
fold: false,
annotations: annotations
.into_iter()
.iter()
.map(|annotation| SourceAnnotation {
range: (annotation.start_col, annotation.end_col),
label: annotation
.label
.as_ref()
.map(|s| s.as_str())
.unwrap_or_default(),
label: annotation.label.as_deref().unwrap_or_default(),
annotation_type: annotation_type_for_level(*level),
})
.collect(),
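The replacement relies on `Option::as_deref`, which turns an `Option<String>` into an `Option<&str>` in one step, replacing the `.as_ref().map(|s| s.as_str())` chain. A small equivalence check (the label text is made up):

```rust
fn main() {
    let label: Option<String> = Some("expected `u32`, found `&str`".to_string());
    let long_form: &str = label.as_ref().map(|s| s.as_str()).unwrap_or_default();
    let short_form: &str = label.as_deref().unwrap_or_default();
    assert_eq!(long_form, short_form);
}
```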
22 changes: 22 additions & 0 deletions src/librustc_hir_pretty/lib.rs
@@ -227,6 +227,28 @@ pub fn path_to_string(segment: &hir::Path<'_>) -> String {
to_string(NO_ANN, |s| s.print_path(segment, false))
}

pub fn fn_to_string(
decl: &hir::FnDecl<'_>,
header: hir::FnHeader,
name: Option<Symbol>,
generics: &hir::Generics<'_>,
vis: &hir::Visibility<'_>,
arg_names: &[Ident],
body_id: Option<hir::BodyId>,
) -> String {
to_string(NO_ANN, |s| s.print_fn(decl, header, name, generics, vis, arg_names, body_id))
}

pub fn enum_def_to_string(
enum_definition: &hir::EnumDef<'_>,
generics: &hir::Generics<'_>,
name: Symbol,
span: rustc_span::Span,
visibility: &hir::Visibility<'_>,
) -> String {
to_string(NO_ANN, |s| s.print_enum_def(enum_definition, generics, name, span, visibility))
}

impl<'a> State<'a> {
pub fn cbox(&mut self, u: usize) {
self.s.cbox(u);
2 changes: 1 addition & 1 deletion src/librustc_infer/infer/error_reporting/need_type_info.rs
@@ -550,7 +550,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
let error_code = error_code.into();
let mut err = self.tcx.sess.struct_span_err_with_code(
local_visitor.target_span,
&format!("type annotations needed"),
"type annotations needed",
error_code,
);

@@ -77,8 +77,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
}
_ => {}
}
let mut type_param_span: MultiSpan =
visitor.types.iter().cloned().collect::<Vec<_>>().into();
let mut type_param_span: MultiSpan = visitor.types.to_vec().into();
for &span in &visitor.types {
type_param_span.push_span_label(
span,
4 changes: 2 additions & 2 deletions src/librustc_lexer/src/lib.rs
@@ -187,9 +187,9 @@ pub fn strip_shebang(input: &str) -> Option<usize> {
// Ok, this is a shebang but if the next non-whitespace token is `[` or maybe
// a doc comment (due to `TokenKind::(Line,Block)Comment` ambiguity at lexer level),
// then it may be valid Rust code, so consider it Rust code.
let next_non_whitespace_token = tokenize(input_tail).map(|tok| tok.kind).filter(|tok|
let next_non_whitespace_token = tokenize(input_tail).map(|tok| tok.kind).find(|tok|
!matches!(tok, TokenKind::Whitespace | TokenKind::LineComment | TokenKind::BlockComment { .. })
).next();
);
if next_non_whitespace_token != Some(TokenKind::OpenBracket) {
// No other choice than to consider this a shebang.
return Some(2 + first_line_tail.len());
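This is clippy's `filter_next` suggestion: `.filter(p).next()` and `.find(p)` return the same element. A small check of that equivalence on plain string tokens (the token values here are made up):

```rust
fn main() {
    let tokens = ["  ", "// comment", "["];
    let via_filter = tokens.iter().filter(|t| !t.trim().is_empty()).next();
    let via_find = tokens.iter().find(|t| !t.trim().is_empty());
    assert_eq!(via_filter, via_find);
    assert_eq!(via_find, Some(&"// comment"));
}
```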
24 changes: 12 additions & 12 deletions src/librustc_middle/hir/map/mod.rs
@@ -335,6 +335,16 @@ impl<'hir> Map<'hir> {
}
}

pub fn enclosing_body_owner(&self, hir_id: HirId) -> HirId {
for (parent, _) in self.parent_iter(hir_id) {
if let Some(body) = self.maybe_body_owned_by(parent) {
return self.body_owner(body);
}
}

bug!("no `enclosing_body_owner` for hir_id `{}`", hir_id);
}

/// Returns the `HirId` that corresponds to the definition of
/// which this is the body of, i.e., a `fn`, `const` or `static`
/// item (possibly associated), a closure, or a `hir::AnonConst`.
@@ -537,18 +547,8 @@ impl<'hir> Map<'hir> {

/// Whether the expression pointed at by `hir_id` belongs to a `const` evaluation context.
/// Used exclusively for diagnostics, to avoid suggestion function calls.
pub fn is_const_context(&self, hir_id: HirId) -> bool {
let parent_id = self.get_parent_item(hir_id);
match self.get(parent_id) {
Node::Item(&Item { kind: ItemKind::Const(..) | ItemKind::Static(..), .. })
| Node::TraitItem(&TraitItem { kind: TraitItemKind::Const(..), .. })
| Node::ImplItem(&ImplItem { kind: ImplItemKind::Const(..), .. })
| Node::AnonConst(_) => true,
Node::Item(&Item { kind: ItemKind::Fn(ref sig, ..), .. }) => {
sig.header.constness == Constness::Const
}
_ => false,
}
pub fn is_inside_const_context(&self, hir_id: HirId) -> bool {
self.body_const_context(self.local_def_id(self.enclosing_body_owner(hir_id))).is_some()
}

/// Whether `hir_id` corresponds to a `mod` or a crate.
@@ -495,7 +495,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
let closure_id = hir.as_local_hir_id(self.mir_def_id);
let fn_call_id = hir.get_parent_node(closure_id);
let node = hir.get(fn_call_id);
let item_id = hir.get_parent_item(fn_call_id);
let item_id = hir.enclosing_body_owner(fn_call_id);
let mut look_at_return = true;
// If we can detect the expression to be an `fn` call where the closure was an argument,
// we point at the `fn` definition argument...
4 changes: 1 addition & 3 deletions src/librustc_mir/const_eval/eval_queries.rs
@@ -309,9 +309,7 @@ pub fn const_eval_raw_provider<'tcx>(

let res = ecx.load_mir(cid.instance.def, cid.promoted);
res.and_then(|body| eval_body_using_ecx(&mut ecx, cid, &body))
.and_then(|place| {
Ok(RawConst { alloc_id: place.ptr.assert_ptr().alloc_id, ty: place.layout.ty })
})
.map(|place| RawConst { alloc_id: place.ptr.assert_ptr().alloc_id, ty: place.layout.ty })
.map_err(|error| {
let err = error_to_const_error(&ecx, error);
// errors in statics are always emitted as fatal errors
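Chaining `.and_then(|x| Ok(..))` is equivalent to `.map(..)` when the closure cannot fail, which is all this hunk changes. A minimal demonstration on a plain `Result`:

```rust
fn main() {
    let res: Result<u32, String> = Ok(3);
    let via_and_then: Result<u32, String> = res.clone().and_then(|v| Ok(v * 2));
    let via_map: Result<u32, String> = res.map(|v| v * 2);
    assert_eq!(via_and_then, via_map);
}
```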
2 changes: 1 addition & 1 deletion src/librustc_mir/transform/check_packed_ref.rs
@@ -51,7 +51,7 @@ impl<'a, 'tcx> Visitor<'tcx> for PackedRefChecker<'a, 'tcx> {
lint_root,
source_info.span,
|lint| {
lint.build(&format!("reference to packed field is unaligned",))
lint.build("reference to packed field is unaligned")
.note(
"fields of packed structs are not properly aligned, and creating \
a misaligned reference is undefined behavior (even if that \
4 changes: 2 additions & 2 deletions src/librustc_mir/transform/nrvo.rs
@@ -111,7 +111,7 @@ fn local_eligible_for_nrvo(body: &mut mir::Body<'_>) -> Option<Local> {
copied_to_return_place = Some(returned_local);
}

return copied_to_return_place;
copied_to_return_place
}

fn find_local_assigned_to_return_place(
@@ -136,7 +136,7 @@ fn find_local_assigned_to_return_place(
}
}

return None;
None
}

// If this statement is an assignment of an unprojected local to the return place,
4 changes: 2 additions & 2 deletions src/librustc_mir/transform/simplify_try.rs
@@ -99,7 +99,7 @@ fn get_arm_identity_info<'a, 'tcx>(stmts: &'a [Statement<'tcx>]) -> Option<ArmId
fn try_eat<'a, 'tcx>(
stmt_iter: &mut StmtIter<'a, 'tcx>,
test: impl Fn(&'a Statement<'tcx>) -> bool,
mut action: impl FnMut(usize, &'a Statement<'tcx>) -> (),
mut action: impl FnMut(usize, &'a Statement<'tcx>),
) {
while stmt_iter.peek().map(|(_, stmt)| test(stmt)).unwrap_or(false) {
let (idx, stmt) = stmt_iter.next().unwrap();
@@ -271,7 +271,7 @@ fn optimization_applies<'tcx>(
}

// Verify the assigment chain consists of the form b = a; c = b; d = c; etc...
if opt_info.field_tmp_assignments.len() == 0 {
if opt_info.field_tmp_assignments.is_empty() {
trace!("NO: no assignments found");
}
let mut last_assigned_to = opt_info.field_tmp_assignments[0].1;
48 changes: 25 additions & 23 deletions src/librustc_mir_build/build/matches/mod.rs
@@ -10,7 +10,7 @@ use crate::build::ForGuard::{self, OutsideGuard, RefWithinGuard};
use crate::build::{BlockAnd, BlockAndExtension, Builder};
use crate::build::{GuardFrame, GuardFrameLocal, LocalsForNode};
use crate::hair::{self, *};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::{fx::{FxHashMap, FxHashSet}, stack::ensure_sufficient_stack};
use rustc_hir::HirId;
use rustc_index::bit_set::BitSet;
use rustc_middle::middle::region;
@@ -909,30 +909,32 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
split_or_candidate |= self.simplify_candidate(candidate);
}

if split_or_candidate {
// At least one of the candidates has been split into subcandidates.
// We need to change the candidate list to include those.
let mut new_candidates = Vec::new();
ensure_sufficient_stack(|| {
if split_or_candidate {
// At least one of the candidates has been split into subcandidates.
// We need to change the candidate list to include those.
let mut new_candidates = Vec::new();

for candidate in candidates {
candidate.visit_leaves(|leaf_candidate| new_candidates.push(leaf_candidate));
for candidate in candidates {
candidate.visit_leaves(|leaf_candidate| new_candidates.push(leaf_candidate));
}
self.match_simplified_candidates(
span,
start_block,
otherwise_block,
&mut *new_candidates,
fake_borrows,
);
} else {
self.match_simplified_candidates(
span,
start_block,
otherwise_block,
candidates,
fake_borrows,
);
}
self.match_simplified_candidates(
span,
start_block,
otherwise_block,
&mut *new_candidates,
fake_borrows,
);
} else {
self.match_simplified_candidates(
span,
start_block,
otherwise_block,
candidates,
fake_borrows,
);
};
});
}

fn match_simplified_candidates(
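The `ensure_sufficient_stack` wrapper guards the recursive match lowering against stack overflow on deeply nested patterns by growing the stack on demand. A rough sketch of the underlying pattern using the `stacker` crate directly — an assumption about the helper's implementation detail, with purely illustrative thresholds:

```rust
// Sketch only: `stacker::maybe_grow(red_zone, new_stack_size, f)` runs `f`,
// allocating a fresh stack segment whenever fewer than `red_zone` bytes remain.
fn depth(n: u64) -> u64 {
    stacker::maybe_grow(128 * 1024, 16 * 1024 * 1024, || {
        if n == 0 { 0 } else { 1 + depth(n - 1) }
    })
}

fn main() {
    // Deep recursion that could overflow a default thread stack without the guard.
    assert_eq!(depth(100_000), 100_000);
}
```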
12 changes: 10 additions & 2 deletions src/librustc_parse/lexer/mod.rs
@@ -325,7 +325,15 @@ impl<'a> StringReader<'a> {
let (lit_kind, mode, prefix_len, postfix_len) = match kind {
rustc_lexer::LiteralKind::Char { terminated } => {
if !terminated {
self.fatal_span_(start, suffix_start, "unterminated character literal").raise()
self.sess
.span_diagnostic
.struct_span_fatal_with_code(
self.mk_sp(start, suffix_start),
"unterminated character literal",
error_code!(E0762),
)
.emit();
FatalError.raise();
}
(token::Char, Mode::Char, 1, 1) // ' '
}
@@ -401,7 +409,7 @@ impl<'a> StringReader<'a> {
let content_end = suffix_start - BytePos(postfix_len);
let id = self.symbol_from_to(content_start, content_end);
self.validate_literal_escape(mode, content_start, content_end);
return (lit_kind, id);
(lit_kind, id)
}

pub fn pos(&self) -> BytePos {
2 changes: 1 addition & 1 deletion src/librustc_parse/parser/diagnostics.rs
@@ -936,7 +936,7 @@ impl<'a> Parser<'a> {
} else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
// The current token is in the same line as the prior token, not recoverable.
} else if [token::Comma, token::Colon].contains(&self.token.kind)
&& &self.prev_token.kind == &token::CloseDelim(token::Paren)
&& self.prev_token.kind == token::CloseDelim(token::Paren)
{
// Likely typo: The current token is on a new line and is expected to be
// `.`, `;`, `?`, or an operator after a close delimiter token.
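The last hunk drops redundant references from an equality check (clippy's `op_ref` lint): comparing `&a == &b` compares the values anyway, so the explicit borrows add nothing. A one-line check:

```rust
fn main() {
    let prev = ')';
    assert_eq!(&prev == &')', prev == ')');
}
```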