diff --git a/src/librustc_ast/tokenstream.rs b/src/librustc_ast/tokenstream.rs
index 075aaa7e5bc01..15ae12ebf10e3 100644
--- a/src/librustc_ast/tokenstream.rs
+++ b/src/librustc_ast/tokenstream.rs
@@ -392,7 +392,7 @@ impl TokenStream {
                     break;
                 }
             }
-            token_trees = out.into_iter().map(|t| TokenTree::Token(t)).collect();
+            token_trees = out.into_iter().map(TokenTree::Token).collect();
             if token_trees.len() != 1 {
                 debug!("break_tokens: broke {:?} to {:?}", tree, token_trees);
             }
diff --git a/src/librustc_ast_lowering/expr.rs b/src/librustc_ast_lowering/expr.rs
index c9037da377ebb..79d16a318d21b 100644
--- a/src/librustc_ast_lowering/expr.rs
+++ b/src/librustc_ast_lowering/expr.rs
@@ -1237,10 +1237,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
                     ) => {
                         assert!(!*late);
                         let out_op_sp = if input { op_sp2 } else { op_sp };
-                        let msg = &format!(
-                            "use `lateout` instead of \
-                             `out` to avoid conflict"
-                        );
+                        let msg = "use `lateout` instead of \
+                                   `out` to avoid conflict";
                         err.span_help(out_op_sp, msg);
                     }
                     _ => {}
diff --git a/src/librustc_builtin_macros/asm.rs b/src/librustc_builtin_macros/asm.rs
index aabd5b5b5c31b..6127fcc5a4bee 100644
--- a/src/librustc_builtin_macros/asm.rs
+++ b/src/librustc_builtin_macros/asm.rs
@@ -457,7 +457,7 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, sp: Span, args: AsmArgs) -> P<ast::Expr> {
FunctionCx<'a, 'tcx, Bx> {
                 .tcx()
                 .destructure_const(ty::ParamEnv::reveal_all().and(&c))
                 .fields
-                .into_iter()
+                .iter()
                 .map(|field| {
                     if let Some(prim) = field.val.try_to_scalar() {
                         let layout = bx.layout_of(field_ty);
diff --git a/src/librustc_errors/annotate_snippet_emitter_writer.rs b/src/librustc_errors/annotate_snippet_emitter_writer.rs
index 5b47364e714e9..265ba59cccb2a 100644
--- a/src/librustc_errors/annotate_snippet_emitter_writer.rs
+++ b/src/librustc_errors/annotate_snippet_emitter_writer.rs
@@ -159,14 +159,10 @@ impl AnnotateSnippetEmitterWriter {
                 // FIXME(#59346): Not really sure when `fold` should be true or false
                 fold: false,
                 annotations: annotations
-                    .into_iter()
+                    .iter()
                     .map(|annotation| SourceAnnotation {
                         range: (annotation.start_col, annotation.end_col),
-                        label: annotation
-                            .label
-                            .as_ref()
-                            .map(|s| s.as_str())
-                            .unwrap_or_default(),
+                        label: annotation.label.as_deref().unwrap_or_default(),
                         annotation_type: annotation_type_for_level(*level),
                     })
                     .collect(),
diff --git a/src/librustc_infer/infer/error_reporting/need_type_info.rs b/src/librustc_infer/infer/error_reporting/need_type_info.rs
index dfc7177921d31..81cb306d26444 100644
--- a/src/librustc_infer/infer/error_reporting/need_type_info.rs
+++ b/src/librustc_infer/infer/error_reporting/need_type_info.rs
@@ -550,7 +550,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
         let error_code = error_code.into();
         let mut err = self.tcx.sess.struct_span_err_with_code(
             local_visitor.target_span,
-            &format!("type annotations needed"),
+            "type annotations needed",
             error_code,
         );
diff --git a/src/librustc_infer/infer/error_reporting/nice_region_error/trait_impl_difference.rs b/src/librustc_infer/infer/error_reporting/nice_region_error/trait_impl_difference.rs
index 5f14f799fc7aa..45aee2b39654d 100644
--- a/src/librustc_infer/infer/error_reporting/nice_region_error/trait_impl_difference.rs
+++ b/src/librustc_infer/infer/error_reporting/nice_region_error/trait_impl_difference.rs
@@ -77,8 +77,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
             }
             _ => {}
         }
-        let mut type_param_span: MultiSpan =
-            visitor.types.iter().cloned().collect::<Vec<_>>().into();
+        let mut type_param_span: MultiSpan = visitor.types.to_vec().into();
         for &span in &visitor.types {
             type_param_span.push_span_label(
                 span,
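The annotate_snippet_emitter_writer.rs hunk above collapses a hand-rolled `.as_ref().map(|s| s.as_str()).unwrap_or_default()` chain into `Option::as_deref`. A minimal standalone sketch of the equivalence, outside the patch (the function name and values here are invented):

    // `as_deref` turns an `Option<String>` into an `Option<&str>` by dereferencing
    // the owned `String`, which is exactly what the longer `as_ref().map(..)` chain did.
    fn label_or_default(label: &Option<String>) -> &str {
        label.as_deref().unwrap_or_default()
    }

    fn main() {
        assert_eq!(label_or_default(&Some("expected `u32`".to_string())), "expected `u32`");
        assert_eq!(label_or_default(&None), ""); // `unwrap_or_default` yields the empty &str
    }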
diff --git a/src/librustc_lexer/src/lib.rs b/src/librustc_lexer/src/lib.rs
index cf90c6d838635..200e7acf80235 100644
--- a/src/librustc_lexer/src/lib.rs
+++ b/src/librustc_lexer/src/lib.rs
@@ -187,9 +187,9 @@ pub fn strip_shebang(input: &str) -> Option<usize> {
         // Ok, this is a shebang but if the next non-whitespace token is `[` or maybe
         // a doc comment (due to `TokenKind::(Line,Block)Comment` ambiguity at lexer level),
         // then it may be valid Rust code, so consider it Rust code.
-        let next_non_whitespace_token = tokenize(input_tail).map(|tok| tok.kind).filter(|tok|
+        let next_non_whitespace_token = tokenize(input_tail).map(|tok| tok.kind).find(|tok|
            !matches!(tok, TokenKind::Whitespace | TokenKind::LineComment | TokenKind::BlockComment { .. })
-        ).next();
+        );
         if next_non_whitespace_token != Some(TokenKind::OpenBracket) {
             // No other choice than to consider this a shebang.
             return Some(2 + first_line_tail.len());
diff --git a/src/librustc_mir/const_eval/eval_queries.rs b/src/librustc_mir/const_eval/eval_queries.rs
index 695e0741e3598..2c0a40b4c543c 100644
--- a/src/librustc_mir/const_eval/eval_queries.rs
+++ b/src/librustc_mir/const_eval/eval_queries.rs
@@ -309,9 +309,7 @@ pub fn const_eval_raw_provider<'tcx>(
     let res = ecx.load_mir(cid.instance.def, cid.promoted);
     res.and_then(|body| eval_body_using_ecx(&mut ecx, cid, &body))
-        .and_then(|place| {
-            Ok(RawConst { alloc_id: place.ptr.assert_ptr().alloc_id, ty: place.layout.ty })
-        })
+        .map(|place| RawConst { alloc_id: place.ptr.assert_ptr().alloc_id, ty: place.layout.ty })
         .map_err(|error| {
             let err = error_to_const_error(&ecx, error);
             // errors in statics are always emitted as fatal errors
diff --git a/src/librustc_mir/transform/check_packed_ref.rs b/src/librustc_mir/transform/check_packed_ref.rs
index faad1a72327f4..043b2d0d1703e 100644
--- a/src/librustc_mir/transform/check_packed_ref.rs
+++ b/src/librustc_mir/transform/check_packed_ref.rs
@@ -51,7 +51,7 @@ impl<'a, 'tcx> Visitor<'tcx> for PackedRefChecker<'a, 'tcx> {
                 lint_root,
                 source_info.span,
                 |lint| {
-                    lint.build(&format!("reference to packed field is unaligned",))
+                    lint.build("reference to packed field is unaligned")
                         .note(
                             "fields of packed structs are not properly aligned, and creating \
                             a misaligned reference is undefined behavior (even if that \
diff --git a/src/librustc_mir/transform/nrvo.rs b/src/librustc_mir/transform/nrvo.rs
index ffad1ebea005b..1f3d7bb7cc6f4 100644
--- a/src/librustc_mir/transform/nrvo.rs
+++ b/src/librustc_mir/transform/nrvo.rs
@@ -111,7 +111,7 @@ fn local_eligible_for_nrvo(body: &mut mir::Body<'_>) -> Option<Local> {
         copied_to_return_place = Some(returned_local);
     }

-    return copied_to_return_place;
+    copied_to_return_place
 }

 fn find_local_assigned_to_return_place(
@@ -136,7 +136,7 @@
         }
     }

-    return None;
+    None
 }

 // If this statement is an assignment of an unprojected local to the return place,
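The strip_shebang hunk above replaces `.filter(..).next()` with `Iterator::find`. A small sketch of why the two spellings are interchangeable (illustrative only; `first_even` and its inputs are made up, not code from this patch):

    // `find` yields the first element satisfying the predicate and stops there,
    // which is precisely what `filter(pred).next()` does, stated in one word.
    fn first_even(xs: &[u32]) -> Option<u32> {
        xs.iter().copied().find(|x| x % 2 == 0)
    }

    fn main() {
        assert_eq!(first_even(&[3, 5, 8, 10]), Some(8));
        assert_eq!(first_even(&[1, 3]), None);
    }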
diff --git a/src/librustc_mir/transform/simplify_try.rs b/src/librustc_mir/transform/simplify_try.rs
index 41ffa6594418f..61f9c281559b7 100644
--- a/src/librustc_mir/transform/simplify_try.rs
+++ b/src/librustc_mir/transform/simplify_try.rs
@@ -99,7 +99,7 @@ fn get_arm_identity_info<'a, 'tcx>(stmts: &'a [Statement<'tcx>]) -> Option<ArmIdentityInfo<'tcx>> {
     fn try_eat<'a, 'tcx>(
         stmt_iter: &mut StmtIter<'a, 'tcx>,
         test: impl Fn(&'a Statement<'tcx>) -> bool,
-        mut action: impl FnMut(usize, &'a Statement<'tcx>) -> (),
+        mut action: impl FnMut(usize, &'a Statement<'tcx>),
     ) {
         while stmt_iter.peek().map(|(_, stmt)| test(stmt)).unwrap_or(false) {
             let (idx, stmt) = stmt_iter.next().unwrap();
@@ -271,7 +271,7 @@ fn optimization_applies<'tcx>(
     }

     // Verify the assigment chain consists of the form b = a; c = b; d = c; etc...
-    if opt_info.field_tmp_assignments.len() == 0 {
+    if opt_info.field_tmp_assignments.is_empty() {
         trace!("NO: no assignments found");
     }
     let mut last_assigned_to = opt_info.field_tmp_assignments[0].1;
diff --git a/src/librustc_parse/lexer/mod.rs b/src/librustc_parse/lexer/mod.rs
index 9bc6a50acad04..641e42558b84b 100644
--- a/src/librustc_parse/lexer/mod.rs
+++ b/src/librustc_parse/lexer/mod.rs
@@ -401,7 +401,7 @@ impl<'a> StringReader<'a> {
         let content_end = suffix_start - BytePos(postfix_len);
         let id = self.symbol_from_to(content_start, content_end);
         self.validate_literal_escape(mode, content_start, content_end);
-        return (lit_kind, id);
+        (lit_kind, id)
     }

     pub fn pos(&self) -> BytePos {
diff --git a/src/librustc_parse/parser/diagnostics.rs b/src/librustc_parse/parser/diagnostics.rs
index 660a63841bcef..8792605c08d38 100644
--- a/src/librustc_parse/parser/diagnostics.rs
+++ b/src/librustc_parse/parser/diagnostics.rs
@@ -936,7 +936,7 @@ impl<'a> Parser<'a> {
         } else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
             // The current token is in the same line as the prior token, not recoverable.
         } else if [token::Comma, token::Colon].contains(&self.token.kind)
-            && &self.prev_token.kind == &token::CloseDelim(token::Paren)
+            && self.prev_token.kind == token::CloseDelim(token::Paren)
         {
             // Likely typo: The current token is on a new line and is expected to be
             // `.`, `;`, `?`, or an operator after a close delimiter token.
diff --git a/src/librustc_parse/parser/mod.rs b/src/librustc_parse/parser/mod.rs
index c00b608482933..47ae92c48bd88 100644
--- a/src/librustc_parse/parser/mod.rs
+++ b/src/librustc_parse/parser/mod.rs
@@ -193,7 +193,7 @@ impl TokenCursor {
                     tree,
                     self.stack.len()
                 );
-                collecting.buf.push(tree.clone().into())
+                collecting.buf.push(tree.clone())
             }
         }
@@ -675,7 +675,7 @@ impl<'a> Parser<'a> {
            // If this was a missing `@` in a binding pattern
            // bail with a suggestion
            // https://github.com/rust-lang/rust/issues/72373
-            if self.prev_token.is_ident() && &self.token.kind == &token::DotDot {
+            if self.prev_token.is_ident() && self.token.kind == token::DotDot {
                let msg = format!(
                    "if you meant to bind the contents of \
                    the rest of the array pattern into `{}`, use `@`",
@@ -1193,7 +1193,7 @@ impl<'a> Parser<'a> {
         let mut collected_tokens = if let Some(collecting) = self.token_cursor.collecting.take() {
             collecting.buf
         } else {
-            let msg = format!("our vector went away?");
+            let msg = "our vector went away?";
             debug!("collect_tokens: {}", msg);
             self.sess.span_diagnostic.delay_span_bug(self.token.span, &msg);
             // This can happen due to a bad interaction of two unrelated recovery mechanisms
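The diagnostics.rs and parser/mod.rs hunks above drop a redundant `&` from both sides of `==`. This works because the standard library forwards `PartialEq` through references, so `&a == &b` performs the same comparison as `a == b`. A standalone sketch (the `Delim` enum is invented for illustration, not a rustc type):

    #[derive(PartialEq)]
    enum Delim {
        Paren,
        Brace,
    }

    fn main() {
        let (a, b, c) = (Delim::Paren, Delim::Paren, Delim::Brace);
        assert!(a == b); // `PartialEq::eq(&a, &b)`
        assert!(&a == &b); // forwarded to the same `eq`, so the extra `&`s add nothing
        assert!(a != c);
    }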
diff --git a/src/librustc_passes/intrinsicck.rs b/src/librustc_passes/intrinsicck.rs
index 93344e907c344..e2bfcf18edb17 100644
--- a/src/librustc_passes/intrinsicck.rs
+++ b/src/librustc_passes/intrinsicck.rs
@@ -232,7 +232,7 @@ impl ExprVisitor<'tcx> {
         // size).
         if let Some((in_expr, Some(in_asm_ty))) = tied_input {
             if in_asm_ty != asm_ty {
-                let msg = &format!("incompatible types for asm inout argument");
+                let msg = "incompatible types for asm inout argument";
                 let mut err = self.tcx.sess.struct_span_err(vec![in_expr.span, expr.span], msg);
                 err.span_label(
                     in_expr.span,
diff --git a/src/librustdoc/html/sources.rs b/src/librustdoc/html/sources.rs
index 018c0e82c4561..f0900c34a4ba3 100644
--- a/src/librustdoc/html/sources.rs
+++ b/src/librustdoc/html/sources.rs
@@ -126,7 +126,7 @@ impl<'a> SourceCollector<'a> {
             &self.scx.themes,
         );
         self.scx.fs.write(&cur, v.as_bytes())?;
-        self.scx.local_sources.insert(p.clone(), href);
+        self.scx.local_sources.insert(p, href);
         Ok(())
     }
 }
diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs
index 149480ec80f29..ede48f7eed451 100644
--- a/src/librustdoc/passes/collect_intra_doc_links.rs
+++ b/src/librustdoc/passes/collect_intra_doc_links.rs
@@ -451,7 +451,7 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
                         ..
                     },
                     ..
-                })) => segments.first().and_then(|seg| Some(seg.ident.to_string())),
+                })) => segments.first().map(|seg| seg.ident.to_string()),
                 Some(hir::Node::Item(hir::Item {
                     ident,
                     kind: hir::ItemKind::Enum(..),
                     ..
                 }))
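The collect_intra_doc_links.rs hunk above is the `Option` counterpart of the earlier eval_queries.rs change: when the closure always produces a value, `map` expresses the same thing as `and_then` plus a `Some`/`Ok` wrapper. A minimal sketch under invented data (the vector and names are not from the patch):

    fn main() {
        let segments = vec!["foo", "bar"];
        // Equivalent to `segments.first().and_then(|seg| Some(seg.to_string()))`.
        let first: Option<String> = segments.first().map(|seg| seg.to_string());
        assert_eq!(first.as_deref(), Some("foo"));
    }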