diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 5f9311635f6bf..15c6d4011e19f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -90,9 +90,6 @@ jobs:
       - name: install WIX
         run: src/ci/scripts/install-wix.sh
         if: success() && !env.SKIP_JOB
-      - name: install InnoSetup
-        run: src/ci/scripts/install-innosetup.sh
-        if: success() && !env.SKIP_JOB
       - name: ensure the build happens on a partition with enough space
         run: src/ci/scripts/symlink-build-dir.sh
         if: success() && !env.SKIP_JOB
@@ -193,9 +190,6 @@ jobs:
       - name: install WIX
         run: src/ci/scripts/install-wix.sh
         if: success() && !env.SKIP_JOB
-      - name: install InnoSetup
-        run: src/ci/scripts/install-innosetup.sh
-        if: success() && !env.SKIP_JOB
       - name: ensure the build happens on a partition with enough space
         run: src/ci/scripts/symlink-build-dir.sh
         if: success() && !env.SKIP_JOB
@@ -537,9 +531,6 @@ jobs:
       - name: install WIX
         run: src/ci/scripts/install-wix.sh
         if: success() && !env.SKIP_JOB
-      - name: install InnoSetup
-        run: src/ci/scripts/install-innosetup.sh
-        if: success() && !env.SKIP_JOB
       - name: ensure the build happens on a partition with enough space
         run: src/ci/scripts/symlink-build-dir.sh
         if: success() && !env.SKIP_JOB
diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs
index c5e2a4a38cff5..f9403260f7752 100644
--- a/src/bootstrap/builder.rs
+++ b/src/bootstrap/builder.rs
@@ -973,7 +973,7 @@ impl<'a> Builder<'a> {
         // we're gated on RUSTC_RPATH here.
        //
        // Ok, so the astute might be wondering "why isn't `-C rpath` used
-        // here?" and that is indeed a good question to task. This codegen
+        // here?" and that is indeed a good question to ask. This codegen
        // option is the compiler's current interface to generating an rpath.
        // Unfortunately it doesn't quite suffice for us.
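The comment above is the background for why bootstrap computes a linker rpath itself instead of using `-C rpath`. As a rough sketch of that approach (the helper name and target strings here are illustrative, not taken from this diff), the flag is made relative to the installed binary so the toolchain stays relocatable:

```rust
// Illustration only: build a relocatable rpath linker flag by hand instead of
// relying on `-C rpath`. `$ORIGIN` / `@loader_path` resolve relative to the
// binary at load time.
fn rpath_flag(target: &str) -> Option<String> {
    let rpath = if target.contains("apple-darwin") {
        "-Wl,-rpath,@loader_path/../lib"
    } else if target.contains("windows") {
        return None; // no rpath concept on Windows; DLLs are found next to the .exe
    } else {
        "-Wl,-rpath,$ORIGIN/../lib"
    };
    Some(format!("-Clink-args={}", rpath))
}
```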
The flag currently // takes no value as an argument, so the compiler calculates what it diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs index 8a2463d378fdb..28430b56ee56f 100644 --- a/src/bootstrap/dist.rs +++ b/src/bootstrap/dist.rs @@ -1875,28 +1875,7 @@ impl Step for Extended { prepare("rust-mingw"); } - builder.install(&xform(&etc.join("exe/rust.iss")), &exe, 0o644); - builder.install(&etc.join("exe/modpath.iss"), &exe, 0o644); - builder.install(&etc.join("exe/upgrade.iss"), &exe, 0o644); builder.install(&etc.join("gfx/rust-logo.ico"), &exe, 0o644); - builder.create(&exe.join("LICENSE.txt"), &license); - - // Generate exe installer - builder.info("building `exe` installer with `iscc`"); - let mut cmd = Command::new("iscc"); - cmd.arg("rust.iss").arg("/Q").current_dir(&exe); - if target.contains("windows-gnu") { - cmd.arg("/dMINGW"); - } - add_env(builder, &mut cmd, target); - let time = timeit(builder); - builder.run(&mut cmd); - drop(time); - builder.install( - &exe.join(format!("{}-{}.exe", pkgname(builder, "rust"), target)), - &distdir(builder), - 0o755, - ); // Generate msi installer let wix = PathBuf::from(env::var_os("WIX").unwrap()); diff --git a/src/ci/azure-pipelines/steps/run.yml b/src/ci/azure-pipelines/steps/run.yml index e43116c06b6b7..34fc4d76fa207 100644 --- a/src/ci/azure-pipelines/steps/run.yml +++ b/src/ci/azure-pipelines/steps/run.yml @@ -66,10 +66,6 @@ steps: displayName: Install wix condition: and(succeeded(), not(variables.SKIP_JOB)) -- bash: src/ci/scripts/install-innosetup.sh - displayName: Install InnoSetup - condition: and(succeeded(), not(variables.SKIP_JOB)) - - bash: src/ci/scripts/symlink-build-dir.sh displayName: Ensure the build happens on a partition with enough space condition: and(succeeded(), not(variables.SKIP_JOB)) diff --git a/src/ci/github-actions/ci.yml b/src/ci/github-actions/ci.yml index daa2d55c04399..00170226e63c7 100644 --- a/src/ci/github-actions/ci.yml +++ b/src/ci/github-actions/ci.yml @@ -131,10 +131,6 @@ x--expand-yaml-anchors--remove: run: src/ci/scripts/install-wix.sh <<: *step - - name: install InnoSetup - run: src/ci/scripts/install-innosetup.sh - <<: *step - - name: ensure the build happens on a partition with enough space run: src/ci/scripts/symlink-build-dir.sh <<: *step diff --git a/src/ci/scripts/install-innosetup.sh b/src/ci/scripts/install-innosetup.sh deleted file mode 100755 index 04ca249777a11..0000000000000 --- a/src/ci/scripts/install-innosetup.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash -# We use InnoSetup and its `iscc` program to also create combined installers. -# Honestly at this point WIX above and `iscc` are just holdovers from -# oh-so-long-ago and are required for creating installers on Windows. I think -# one is MSI installers and one is EXE, but they're not used so frequently at -# this point anyway so perhaps it's a wash! 
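For context on the block removed from `dist.rs` above: it drove Inno Setup's `iscc` through `std::process::Command`. Outside of bootstrap's `builder.run`/`timeit` helpers, the same pattern looks roughly like this (a sketch only; directory and error handling are simplified assumptions):

```rust
use std::path::Path;
use std::process::Command;

// Sketch of the pattern the removed block used: run an external packaging
// tool in a working directory and fail loudly if it reports an error.
fn run_iscc(exe_dir: &Path, mingw: bool) -> std::io::Result<()> {
    let mut cmd = Command::new("iscc");
    cmd.arg("rust.iss").arg("/Q").current_dir(exe_dir);
    if mingw {
        cmd.arg("/dMINGW");
    }
    let status = cmd.status()?;
    assert!(status.success(), "iscc failed with {}", status);
    Ok(())
}
```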
- -set -euo pipefail -IFS=$'\n\t' - -source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" - -if isWindows; then - curl.exe -o is-install.exe "${MIRRORS_BASE}/2017-08-22-is.exe" - cmd.exe //c "is-install.exe /VERYSILENT /SUPPRESSMSGBOXES /NORESTART /SP-" - - ciCommandAddPath "C:\\Program Files (x86)\\Inno Setup 5" -fi diff --git a/src/etc/installer/exe/modpath.iss b/src/etc/installer/exe/modpath.iss deleted file mode 100644 index 2cfc8698c4b67..0000000000000 --- a/src/etc/installer/exe/modpath.iss +++ /dev/null @@ -1,219 +0,0 @@ -// ---------------------------------------------------------------------------- -// -// Inno Setup Ver: 5.4.2 -// Script Version: 1.4.1 -// Author: Jared Breland -// Homepage: http://www.legroom.net/software -// License: GNU Lesser General Public License (LGPL), version 3 -// http://www.gnu.org/licenses/lgpl.html -// -// Script Function: -// Allow modification of environmental path directly from Inno Setup installers -// -// Instructions: -// Copy modpath.iss to the same directory as your setup script -// -// Add this statement to your [Setup] section -// ChangesEnvironment=true -// -// Add this statement to your [Tasks] section -// You can change the Description or Flags -// You can change the Name, but it must match the ModPathName setting below -// Name: modifypath; Description: &Add application directory to your environmental path; Flags: unchecked -// -// Add the following to the end of your [Code] section -// ModPathName defines the name of the task defined above -// ModPathType defines whether the 'user' or 'system' path will be modified; -// this will default to user if anything other than system is set -// setArrayLength must specify the total number of dirs to be added -// Result[0] contains first directory, Result[1] contains second, etc. 
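The `modpath.iss` script being deleted here boils down to: split the existing PATH on `;` and append the install directory only if it is not already present. Stripped of the Windows registry and AUTOEXEC.BAT plumbing, that core logic is just the following (function name hypothetical, shown in Rust for illustration):

```rust
// The PATH edit modpath.iss performs, minus registry/AUTOEXEC plumbing:
// append `dir` to a `;`-separated PATH unless an equal entry already exists.
fn add_to_path(path: &str, dir: &str) -> String {
    if path.split(';').any(|entry| entry.eq_ignore_ascii_case(dir)) {
        return path.to_string();
    }
    if path.is_empty() {
        dir.to_string()
    } else {
        format!("{};{}", path, dir)
    }
}

#[test]
fn appends_once() {
    let once = add_to_path("C:\\Windows", "C:\\Rust\\bin");
    assert_eq!(once, "C:\\Windows;C:\\Rust\\bin");
    assert_eq!(add_to_path(&once, "C:\\Rust\\bin"), once);
}
```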
-// const -// ModPathName = 'modifypath'; -// ModPathType = 'user'; -// -// function ModPathDir(): TArrayOfString; -// begin -// setArrayLength(Result, 1); -// Result[0] := ExpandConstant('{app}'); -// end; -// #include "modpath.iss" -// ---------------------------------------------------------------------------- - -procedure ModPath(); -var - oldpath: String; - newpath: String; - updatepath: Boolean; - pathArr: TArrayOfString; - aExecFile: String; - aExecArr: TArrayOfString; - i, d: Integer; - pathdir: TArrayOfString; - regroot: Integer; - regpath: String; - -begin - // Get constants from main script and adjust behavior accordingly - // ModPathType MUST be 'system' or 'user'; force 'user' if invalid - if ModPathType = 'system' then begin - regroot := HKEY_LOCAL_MACHINE; - regpath := 'SYSTEM\CurrentControlSet\Control\Session Manager\Environment'; - end else begin - regroot := HKEY_CURRENT_USER; - regpath := 'Environment'; - end; - - // Get array of new directories and act on each individually - pathdir := ModPathDir(); - for d := 0 to GetArrayLength(pathdir)-1 do begin - updatepath := true; - - // Modify WinNT path - if UsingWinNT() = true then begin - - // Get current path, split into an array - RegQueryStringValue(regroot, regpath, 'Path', oldpath); - oldpath := oldpath + ';'; - i := 0; - - while (Pos(';', oldpath) > 0) do begin - SetArrayLength(pathArr, i+1); - pathArr[i] := Copy(oldpath, 0, Pos(';', oldpath)-1); - oldpath := Copy(oldpath, Pos(';', oldpath)+1, Length(oldpath)); - i := i + 1; - - // Check if current directory matches app dir - if pathdir[d] = pathArr[i-1] then begin - // if uninstalling, remove dir from path - if IsUninstaller() = true then begin - continue; - // if installing, flag that dir already exists in path - end else begin - updatepath := false; - end; - end; - - // Add current directory to new path - if i = 1 then begin - newpath := pathArr[i-1]; - end else begin - newpath := newpath + ';' + pathArr[i-1]; - end; - end; - - // Append app dir to path if not already included - if (IsUninstaller() = false) AND (updatepath = true) then - newpath := newpath + ';' + pathdir[d]; - - // Write new path - RegWriteStringValue(regroot, regpath, 'Path', newpath); - - // Modify Win9x path - end else begin - - // Convert to shortened dirname - pathdir[d] := GetShortName(pathdir[d]); - - // If autoexec.bat exists, check if app dir already exists in path - aExecFile := 'C:\AUTOEXEC.BAT'; - if FileExists(aExecFile) then begin - LoadStringsFromFile(aExecFile, aExecArr); - for i := 0 to GetArrayLength(aExecArr)-1 do begin - if IsUninstaller() = false then begin - // If app dir already exists while installing, skip add - if (Pos(pathdir[d], aExecArr[i]) > 0) then - updatepath := false; - break; - end else begin - // If app dir exists and = what we originally set, then delete at uninstall - if aExecArr[i] = 'SET PATH=%PATH%;' + pathdir[d] then - aExecArr[i] := ''; - end; - end; - end; - - // If app dir not found, or autoexec.bat didn't exist, then (create and) append to current path - if (IsUninstaller() = false) AND (updatepath = true) then begin - SaveStringToFile(aExecFile, #13#10 + 'SET PATH=%PATH%;' + pathdir[d], True); - - // If uninstalling, write the full autoexec out - end else begin - SaveStringsToFile(aExecFile, aExecArr, False); - end; - end; - end; -end; - -// Split a string into an array using passed delimiter -procedure Explode(var Dest: TArrayOfString; Text: String; Separator: String); -var - i: Integer; -begin - i := 0; - repeat - SetArrayLength(Dest, i+1); - if 
Pos(Separator,Text) > 0 then begin - Dest[i] := Copy(Text, 1, Pos(Separator, Text)-1); - Text := Copy(Text, Pos(Separator,Text) + Length(Separator), Length(Text)); - i := i + 1; - end else begin - Dest[i] := Text; - Text := ''; - end; - until Length(Text)=0; -end; - - -procedure ModPathCurStepChanged(CurStep: TSetupStep); -var - taskname: String; -begin - taskname := ModPathName; - if CurStep = ssPostInstall then - if IsTaskSelected(taskname) then - ModPath(); -end; - -procedure CurUninstallStepChanged(CurUninstallStep: TUninstallStep); -var - aSelectedTasks: TArrayOfString; - i: Integer; - taskname: String; - regpath: String; - regstring: String; - appid: String; -begin - // only run during actual uninstall - if CurUninstallStep = usUninstall then begin - // get list of selected tasks saved in registry at install time - appid := '{#emit SetupSetting("AppId")}'; - if appid = '' then appid := '{#emit SetupSetting("AppName")}'; - regpath := ExpandConstant('Software\Microsoft\Windows\CurrentVersion\Uninstall\'+appid+'_is1'); - RegQueryStringValue(HKLM, regpath, 'Inno Setup: Selected Tasks', regstring); - if regstring = '' then RegQueryStringValue(HKCU, regpath, 'Inno Setup: Selected Tasks', regstring); - - // check each task; if matches modpath taskname, trigger patch removal - if regstring <> '' then begin - taskname := ModPathName; - Explode(aSelectedTasks, regstring, ','); - if GetArrayLength(aSelectedTasks) > 0 then begin - for i := 0 to GetArrayLength(aSelectedTasks)-1 do begin - if comparetext(aSelectedTasks[i], taskname) = 0 then - ModPath(); - end; - end; - end; - end; -end; - -function NeedRestart(): Boolean; -var - taskname: String; -begin - taskname := ModPathName; - if IsTaskSelected(taskname) and not UsingWinNT() then begin - Result := True; - end else begin - Result := False; - end; -end; diff --git a/src/etc/installer/exe/rust.iss b/src/etc/installer/exe/rust.iss deleted file mode 100644 index 70648beac38b0..0000000000000 --- a/src/etc/installer/exe/rust.iss +++ /dev/null @@ -1,87 +0,0 @@ -#define CFG_RELEASE_NUM GetEnv("CFG_RELEASE_NUM") -#define CFG_RELEASE GetEnv("CFG_RELEASE") -#define CFG_PACKAGE_NAME GetEnv("CFG_PACKAGE_NAME") -#define CFG_BUILD GetEnv("CFG_BUILD") - -[Setup] - -SetupIconFile=rust-logo.ico -AppName=Rust -AppVersion={#CFG_RELEASE} -AppCopyright=Copyright (C) 2006-2014 Mozilla Foundation, MIT license -AppPublisher=Mozilla Foundation -AppPublisherURL=http://www.rust-lang.org -VersionInfoVersion={#CFG_RELEASE_NUM} -LicenseFile=LICENSE.txt - -PrivilegesRequired=lowest -DisableWelcomePage=true -DisableProgramGroupPage=true -DisableReadyPage=true -DisableStartupPrompt=true - -OutputDir=.\ -SourceDir=.\ -OutputBaseFilename={#CFG_PACKAGE_NAME}-{#CFG_BUILD} -DefaultDirName={sd}\Rust - -Compression=lzma2/normal -InternalCompressLevel=normal -SolidCompression=no - -ChangesEnvironment=true -ChangesAssociations=no -AllowUNCPath=false -AllowNoIcons=true -Uninstallable=yes - -[Tasks] -Name: modifypath; Description: &Add {app}\bin to your PATH (recommended) - -[Components] -Name: rust; Description: "Rust compiler and standard crates"; Types: full compact custom; Flags: fixed -#ifdef MINGW -Name: gcc; Description: "Linker and platform libraries"; Types: full -#endif -Name: docs; Description: "HTML documentation"; Types: full -Name: cargo; Description: "Cargo, the Rust package manager"; Types: full -Name: std; Description: "The Rust Standard Library"; Types: full -// tool-rls-start -Name: rls; Description: "RLS, the Rust Language Server" -// tool-rls-end - -[Files] -Source: 
"rustc/*.*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs; Components: rust -#ifdef MINGW -Source: "rust-mingw/*.*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs; Components: gcc -#endif -Source: "rust-docs/*.*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs; Components: docs -Source: "cargo/*.*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs; Components: cargo -Source: "rust-std/*.*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs; Components: std -// tool-rls-start -Source: "rls/*.*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs; Components: rls -Source: "rust-analysis/*.*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs; Components: rls -// tool-rls-end - -[Code] -const - ModPathName = 'modifypath'; - ModPathType = 'user'; - -function ModPathDir(): TArrayOfString; -begin - setArrayLength(Result, 1) - Result[0] := ExpandConstant('{app}\bin'); -end; - -#include "modpath.iss" -#include "upgrade.iss" - -// Both modpath.iss and upgrade.iss want to overload CurStepChanged. -// This version does the overload then delegates to each. - -procedure CurStepChanged(CurStep: TSetupStep); -begin - UpgradeCurStepChanged(CurStep); - ModPathCurStepChanged(CurStep); -end; diff --git a/src/etc/installer/exe/upgrade.iss b/src/etc/installer/exe/upgrade.iss deleted file mode 100644 index 29da7c333bb72..0000000000000 --- a/src/etc/installer/exe/upgrade.iss +++ /dev/null @@ -1,61 +0,0 @@ -// The following code taken from https://stackoverflow.com/questions/2000296/innosetup-how-to-automatically-uninstall-previous-installed-version -// It performs upgrades by running the uninstaller before the install - -///////////////////////////////////////////////////////////////////// -function GetUninstallString(): String; -var - sUnInstPath: String; - sUnInstallString: String; -begin - sUnInstPath := ExpandConstant('Software\Microsoft\Windows\CurrentVersion\Uninstall\Rust_is1'); - sUnInstallString := ''; - if not RegQueryStringValue(HKLM, sUnInstPath, 'UninstallString', sUnInstallString) then - RegQueryStringValue(HKCU, sUnInstPath, 'UninstallString', sUnInstallString); - Result := sUnInstallString; -end; - - -///////////////////////////////////////////////////////////////////// -function IsUpgrade(): Boolean; -begin - Result := (GetUninstallString() <> ''); -end; - - -///////////////////////////////////////////////////////////////////// -function UnInstallOldVersion(): Integer; -var - sUnInstallString: String; - iResultCode: Integer; -begin -// Return Values: -// 1 - uninstall string is empty -// 2 - error executing the UnInstallString -// 3 - successfully executed the UnInstallString - - // default return value - Result := 0; - - // get the uninstall string of the old app - sUnInstallString := GetUninstallString(); - if sUnInstallString <> '' then begin - sUnInstallString := RemoveQuotes(sUnInstallString); - if Exec(sUnInstallString, '/SILENT /NORESTART /SUPPRESSMSGBOXES','', SW_HIDE, ewWaitUntilTerminated, iResultCode) then - Result := 3 - else - Result := 2; - end else - Result := 1; -end; - -///////////////////////////////////////////////////////////////////// -procedure UpgradeCurStepChanged(CurStep: TSetupStep); -begin - if (CurStep=ssInstall) then - begin - if (IsUpgrade()) then - begin - UnInstallOldVersion(); - end; - end; -end; diff --git a/src/librustc_ast/attr/mod.rs b/src/librustc_ast/attr/mod.rs index 6c128f0176f66..76139209c9151 100644 --- a/src/librustc_ast/attr/mod.rs +++ b/src/librustc_ast/attr/mod.rs @@ -475,7 +475,7 @@ impl MetaItem { let span = 
span.with_hi(segments.last().unwrap().ident.span.hi()); Path { span, segments } } - Some(TokenTree::Token(Token { kind: token::Interpolated(nt, _), .. })) => match *nt { + Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. })) => match *nt { token::Nonterminal::NtMeta(ref item) => return item.meta(item.path.span), token::Nonterminal::NtPath(ref path) => path.clone(), _ => return None, @@ -560,6 +560,9 @@ impl MetaItemKind { tokens: &mut impl Iterator, ) -> Option { match tokens.next() { + Some(TokenTree::Delimited(_, token::NoDelim, inner_tokens)) => { + MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees()) + } Some(TokenTree::Token(token)) => { Lit::from_token(&token).ok().map(MetaItemKind::NameValue) } @@ -619,13 +622,20 @@ impl NestedMetaItem { where I: Iterator, { - if let Some(TokenTree::Token(token)) = tokens.peek() { - if let Ok(lit) = Lit::from_token(token) { + match tokens.peek() { + Some(TokenTree::Token(token)) => { + if let Ok(lit) = Lit::from_token(token) { + tokens.next(); + return Some(NestedMetaItem::Literal(lit)); + } + } + Some(TokenTree::Delimited(_, token::NoDelim, inner_tokens)) => { + let inner_tokens = inner_tokens.clone(); tokens.next(); - return Some(NestedMetaItem::Literal(lit)); + return NestedMetaItem::from_tokens(&mut inner_tokens.into_trees().peekable()); } + _ => {} } - MetaItem::from_tokens(tokens).map(NestedMetaItem::MetaItem) } } diff --git a/src/librustc_ast/mut_visit.rs b/src/librustc_ast/mut_visit.rs index 3fd2815daa14f..54f81ef106fe1 100644 --- a/src/librustc_ast/mut_visit.rs +++ b/src/librustc_ast/mut_visit.rs @@ -656,7 +656,7 @@ pub fn noop_visit_token(t: &mut Token, vis: &mut T) { *span = ident.span; return; // Avoid visiting the span for the second time. } - token::Interpolated(nt, _) => { + token::Interpolated(nt) => { let mut nt = Lrc::make_mut(nt); vis.visit_interpolated(&mut nt); } diff --git a/src/librustc_ast/token.rs b/src/librustc_ast/token.rs index 89be3e6e212cc..173ea5e48d682 100644 --- a/src/librustc_ast/token.rs +++ b/src/librustc_ast/token.rs @@ -11,7 +11,7 @@ use crate::tokenstream::TokenTree; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::sync::Lrc; use rustc_macros::HashStable_Generic; -use rustc_span::symbol::kw; +use rustc_span::symbol::{kw, sym}; use rustc_span::symbol::{Ident, Symbol}; use rustc_span::{self, Span, DUMMY_SP}; use std::borrow::Cow; @@ -182,15 +182,6 @@ fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool { .contains(&name) } -/// A hack used to pass AST fragments to attribute and derive macros -/// as a single nonterminal token instead of a token stream. -/// FIXME: It needs to be removed, but there are some compatibility issues (see #73345). -#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)] -pub enum FlattenGroup { - Yes, - No, -} - #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)] pub enum TokenKind { /* Expression-operator symbols. */ @@ -245,7 +236,7 @@ pub enum TokenKind { /// treat regular and interpolated lifetime identifiers in the same way. Lifetime(Symbol), - Interpolated(Lrc, FlattenGroup), + Interpolated(Lrc), // Can be expanded into several tokens. /// A doc comment. @@ -352,7 +343,7 @@ impl Token { /// if they keep spans or perform edition checks. 
pub fn uninterpolated_span(&self) -> Span { match &self.kind { - Interpolated(nt, _) => nt.span(), + Interpolated(nt) => nt.span(), _ => self.span, } } @@ -391,7 +382,7 @@ impl Token { ModSep | // global path Lifetime(..) | // labeled loop Pound => true, // expression attributes - Interpolated(ref nt, _) => match **nt { + Interpolated(ref nt) => match **nt { NtLiteral(..) | NtExpr(..) | NtBlock(..) | @@ -417,7 +408,7 @@ impl Token { Lifetime(..) | // lifetime bound in trait object Lt | BinOp(Shl) | // associated path ModSep => true, // global path - Interpolated(ref nt, _) => match **nt { + Interpolated(ref nt) => match **nt { NtTy(..) | NtPath(..) => true, _ => false, }, @@ -429,7 +420,7 @@ impl Token { pub fn can_begin_const_arg(&self) -> bool { match self.kind { OpenDelim(Brace) => true, - Interpolated(ref nt, _) => match **nt { + Interpolated(ref nt) => match **nt { NtExpr(..) | NtBlock(..) | NtLiteral(..) => true, _ => false, }, @@ -464,7 +455,7 @@ impl Token { match self.uninterpolate().kind { Literal(..) | BinOp(Minus) => true, Ident(name, false) if name.is_bool_lit() => true, - Interpolated(ref nt, _) => match &**nt { + Interpolated(ref nt) => match &**nt { NtLiteral(_) => true, NtExpr(e) => match &e.kind { ast::ExprKind::Lit(_) => true, @@ -485,7 +476,7 @@ impl Token { // otherwise returns the original token. pub fn uninterpolate(&self) -> Cow<'_, Token> { match &self.kind { - Interpolated(nt, _) => match **nt { + Interpolated(nt) => match **nt { NtIdent(ident, is_raw) => { Cow::Owned(Token::new(Ident(ident.name, is_raw), ident.span)) } @@ -532,7 +523,7 @@ impl Token { /// Returns `true` if the token is an interpolated path. fn is_path(&self) -> bool { - if let Interpolated(ref nt, _) = self.kind { + if let Interpolated(ref nt) = self.kind { if let NtPath(..) = **nt { return true; } @@ -544,7 +535,7 @@ impl Token { /// That is, is this a pre-parsed expression dropped into the token stream /// (which happens while parsing the result of macro expansion)? pub fn is_whole_expr(&self) -> bool { - if let Interpolated(ref nt, _) = self.kind { + if let Interpolated(ref nt) = self.kind { if let NtExpr(_) | NtLiteral(_) | NtPath(_) | NtIdent(..) | NtBlock(_) = **nt { return true; } @@ -555,7 +546,7 @@ impl Token { // Is the token an interpolated block (`$b:block`)? pub fn is_whole_block(&self) -> bool { - if let Interpolated(ref nt, _) = self.kind { + if let Interpolated(ref nt) = self.kind { if let NtBlock(..) = **nt { return true; } @@ -785,6 +776,26 @@ impl Nonterminal { NtTT(tt) => tt.span(), } } + + /// This nonterminal looks like some specific enums from + /// `proc-macro-hack` and `procedural-masquerade` crates. + /// We need to maintain some special pretty-printing behavior for them due to incorrect + /// asserts in old versions of those crates and their wide use in the ecosystem. + /// See issue #73345 for more details. + /// FIXME(#73933): Remove this eventually. 
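The doc comment above introduces `pretty_printing_compatibility_hack`, and the body added just below only fires for items shaped like the helper enums emitted by `proc-macro-hack` / `procedural-masquerade`: one of two known idents with a single `Input` variant. Reconstructed from those checks (not copied from either crate), the item it is looking for is roughly:

```rust
// The shape the hack below detects: an enum named `ProcMacroHack` or
// `ProceduralMasqueradeDummyType` with exactly one variant named `Input`.
#[allow(dead_code)]
enum ProceduralMasqueradeDummyType {
    Input,
}
```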
+ pub fn pretty_printing_compatibility_hack(&self) -> bool { + if let NtItem(item) = self { + let name = item.ident.name; + if name == sym::ProceduralMasqueradeDummyType || name == sym::ProcMacroHack { + if let ast::ItemKind::Enum(enum_def, _) = &item.kind { + if let [variant] = &*enum_def.variants { + return variant.ident.name == sym::Input; + } + } + } + } + false + } } impl PartialEq for Nonterminal { diff --git a/src/librustc_ast/util/literal.rs b/src/librustc_ast/util/literal.rs index ea59f867c59d2..4428d09902b92 100644 --- a/src/librustc_ast/util/literal.rs +++ b/src/librustc_ast/util/literal.rs @@ -205,7 +205,7 @@ impl Lit { token::Lit::new(token::Bool, name, None) } token::Literal(lit) => lit, - token::Interpolated(ref nt, _) => { + token::Interpolated(ref nt) => { if let token::NtExpr(expr) | token::NtLiteral(expr) = &**nt { if let ast::ExprKind::Lit(lit) = &expr.kind { return Ok(lit.clone()); diff --git a/src/librustc_ast_lowering/lib.rs b/src/librustc_ast_lowering/lib.rs index bc0980f041b94..b5d3beb4f8a9b 100644 --- a/src/librustc_ast_lowering/lib.rs +++ b/src/librustc_ast_lowering/lib.rs @@ -39,8 +39,8 @@ use rustc_ast::ast; use rustc_ast::ast::*; use rustc_ast::attr; use rustc_ast::node_id::NodeMap; -use rustc_ast::token::{self, Nonterminal, Token}; -use rustc_ast::tokenstream::{TokenStream, TokenTree}; +use rustc_ast::token::{self, DelimToken, Nonterminal, Token}; +use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree}; use rustc_ast::visit::{self, AssocCtxt, Visitor}; use rustc_ast::walk_list; use rustc_ast_pretty::pprust; @@ -1027,9 +1027,14 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { fn lower_token(&mut self, token: Token) -> TokenStream { match token.kind { - token::Interpolated(nt, _) => { + token::Interpolated(nt) => { let tts = (self.nt_to_tokenstream)(&nt, &self.sess.parse_sess, token.span); - self.lower_token_stream(tts) + TokenTree::Delimited( + DelimSpan::from_single(token.span), + DelimToken::NoDelim, + self.lower_token_stream(tts), + ) + .into() } _ => TokenTree::Token(token).into(), } diff --git a/src/librustc_ast_pretty/pprust.rs b/src/librustc_ast_pretty/pprust.rs index 86faa1f086ce2..5a6e10f49f9e6 100644 --- a/src/librustc_ast_pretty/pprust.rs +++ b/src/librustc_ast_pretty/pprust.rs @@ -148,9 +148,14 @@ pub fn to_string(f: impl FnOnce(&mut State<'_>)) -> String { printer.s.eof() } -// This makes comma-separated lists look slightly nicer, -// and also addresses a specific regression described in issue #63896. +// This makes printed token streams look slightly nicer, +// and also addresses some specific regressions described in #63896 and #73345. fn tt_prepend_space(tt: &TokenTree, prev: &TokenTree) -> bool { + if let TokenTree::Token(token) = prev { + if let token::DocComment(s) = token.kind { + return !s.as_str().starts_with("//"); + } + } match tt { TokenTree::Token(token) => match token.kind { token::Comma => false, @@ -163,7 +168,14 @@ fn tt_prepend_space(tt: &TokenTree, prev: &TokenTree) -> bool { }, _ => true, }, - _ => true, + TokenTree::Delimited(_, DelimToken::Bracket, _) => match prev { + TokenTree::Token(token) => match token.kind { + token::Pound => false, + _ => true, + }, + _ => true, + }, + TokenTree::Delimited(..) 
=> true, } } @@ -245,7 +257,7 @@ fn token_kind_to_string_ext(tok: &TokenKind, convert_dollar_crate: Option) token::CloseDelim(token::Bracket) => "]".to_string(), token::OpenDelim(token::Brace) => "{".to_string(), token::CloseDelim(token::Brace) => "}".to_string(), - token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => " ".to_string(), + token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => "".to_string(), token::Pound => "#".to_string(), token::Dollar => "$".to_string(), token::Question => "?".to_string(), @@ -266,7 +278,7 @@ fn token_kind_to_string_ext(tok: &TokenKind, convert_dollar_crate: Option) token::Shebang(s) => format!("/* shebang: {}*/", s), token::Unknown(s) => s.to_string(), - token::Interpolated(ref nt, _) => nonterminal_to_string(nt), + token::Interpolated(ref nt) => nonterminal_to_string(nt), } } diff --git a/src/librustc_data_structures/obligation_forest/mod.rs b/src/librustc_data_structures/obligation_forest/mod.rs index 197169b7036e0..7cf5202d919f8 100644 --- a/src/librustc_data_structures/obligation_forest/mod.rs +++ b/src/librustc_data_structures/obligation_forest/mod.rs @@ -412,9 +412,7 @@ impl ObligationForest { // be computed with the initial length, and we would miss the appended // nodes. Therefore we use a `while` loop. let mut index = 0; - while index < self.nodes.len() { - let node = &mut self.nodes[index]; - + while let Some(node) = self.nodes.get_mut(index) { // `processor.process_obligation` can modify the predicate within // `node.obligation`, and that predicate is the key used for // `self.active_cache`. This means that `self.active_cache` can get @@ -666,8 +664,8 @@ impl ObligationForest { for node in &mut self.nodes { let mut i = 0; - while i < node.dependents.len() { - let new_index = node_rewrites[node.dependents[i]]; + while let Some(dependent) = node.dependents.get_mut(i) { + let new_index = node_rewrites[*dependent]; if new_index >= orig_nodes_len { node.dependents.swap_remove(i); if i == 0 && node.has_parent { @@ -675,7 +673,7 @@ impl ObligationForest { node.has_parent = false; } } else { - node.dependents[i] = new_index; + *dependent = new_index; i += 1; } } diff --git a/src/librustc_data_structures/transitive_relation.rs b/src/librustc_data_structures/transitive_relation.rs index de503fe8228aa..189da3395ad1b 100644 --- a/src/librustc_data_structures/transitive_relation.rs +++ b/src/librustc_data_structures/transitive_relation.rs @@ -391,14 +391,12 @@ impl TransitiveRelation { /// - Input: `[a, x, b, y]`. Output: `[a, x]`. fn pare_down(candidates: &mut Vec, closure: &BitMatrix) { let mut i = 0; - while i < candidates.len() { - let candidate_i = candidates[i]; + while let Some(&candidate_i) = candidates.get(i) { i += 1; let mut j = i; let mut dead = 0; - while j < candidates.len() { - let candidate_j = candidates[j]; + while let Some(&candidate_j) = candidates.get(j) { if closure.contains(candidate_i, candidate_j) { // If `i` can reach `j`, then we can remove `j`. So just // mark it as dead and move on; subsequent indices will be diff --git a/src/librustc_error_codes/error_codes/E0712.md b/src/librustc_error_codes/error_codes/E0712.md index 89f60084f0a60..7e09210e7874c 100644 --- a/src/librustc_error_codes/error_codes/E0712.md +++ b/src/librustc_error_codes/error_codes/E0712.md @@ -1,5 +1,5 @@ -This error occurs because a borrow of a thread-local variable was made inside a -function which outlived the lifetime of the function. 
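A side note on the `obligation_forest` and `transitive_relation` hunks above: they swap `while i < v.len()` indexing for `while let Some(x) = v.get(i)` / `get_mut(i)`, which re-checks the length on every iteration, so the loops stay correct when entries are appended or `swap_remove`d mid-iteration. A minimal standalone version of the pattern (example data made up):

```rust
// Same looping shape as the hunks above: `get_mut` performs the bounds check,
// so there is no separate `i < len()` condition to keep in sync.
fn double_small_entries(values: &mut [u32]) {
    let mut i = 0;
    while let Some(value) = values.get_mut(i) {
        if *value < 10 {
            *value *= 2;
        }
        i += 1;
    }
}
```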
+A borrow of a thread-local variable was made inside a function which outlived +the lifetime of the function. Erroneous code example: diff --git a/src/librustc_expand/base.rs b/src/librustc_expand/base.rs index 757eee8bb46e1..db9293bddeb7d 100644 --- a/src/librustc_expand/base.rs +++ b/src/librustc_expand/base.rs @@ -4,14 +4,14 @@ use crate::module::DirectoryOwnership; use rustc_ast::ast::{self, Attribute, NodeId, PatKind}; use rustc_ast::mut_visit::{self, MutVisitor}; use rustc_ast::ptr::P; -use rustc_ast::token::{self, FlattenGroup}; -use rustc_ast::tokenstream::{self, TokenStream, TokenTree}; +use rustc_ast::token; +use rustc_ast::tokenstream::{self, TokenStream}; use rustc_ast::visit::{AssocCtxt, Visitor}; use rustc_attr::{self as attr, Deprecation, HasAttrs, Stability}; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sync::{self, Lrc}; use rustc_errors::{DiagnosticBuilder, ErrorReported}; -use rustc_parse::{self, parser, MACRO_ARGUMENTS}; +use rustc_parse::{self, nt_to_tokenstream, parser, MACRO_ARGUMENTS}; use rustc_session::{parse::ParseSess, Limit}; use rustc_span::def_id::DefId; use rustc_span::edition::Edition; @@ -120,10 +120,7 @@ impl Annotatable { } } - crate fn into_tokens(self) -> TokenStream { - // `Annotatable` can be converted into tokens directly, but we - // are packing it into a nonterminal as a piece of AST to make - // the produced token stream look nicer in pretty-printed form. + crate fn into_tokens(self, sess: &ParseSess) -> TokenStream { let nt = match self { Annotatable::Item(item) => token::NtItem(item), Annotatable::TraitItem(item) | Annotatable::ImplItem(item) => { @@ -142,7 +139,7 @@ impl Annotatable { | Annotatable::StructField(..) | Annotatable::Variant(..) => panic!("unexpected annotatable"), }; - TokenTree::token(token::Interpolated(Lrc::new(nt), FlattenGroup::Yes), DUMMY_SP).into() + nt_to_tokenstream(&nt, sess, DUMMY_SP) } pub fn expect_item(self) -> P { @@ -374,7 +371,7 @@ where impl MutVisitor for AvoidInterpolatedIdents { fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) { if let tokenstream::TokenTree::Token(token) = tt { - if let token::Interpolated(nt, _) = &token.kind { + if let token::Interpolated(nt) = &token.kind { if let token::NtIdent(ident, is_raw) = **nt { *tt = tokenstream::TokenTree::token( token::Ident(ident.name, is_raw), diff --git a/src/librustc_expand/expand.rs b/src/librustc_expand/expand.rs index 4e41bd4bbfa08..bd7a094c5e355 100644 --- a/src/librustc_expand/expand.rs +++ b/src/librustc_expand/expand.rs @@ -705,7 +705,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { SyntaxExtensionKind::Attr(expander) => { self.gate_proc_macro_input(&item); self.gate_proc_macro_attr_item(span, &item); - let tokens = item.into_tokens(); + let tokens = item.into_tokens(self.cx.parse_sess); let attr_item = attr.unwrap_normal_item(); if let MacArgs::Eq(..) 
= attr_item.args { self.cx.span_err(span, "key-value macro attributes are not supported"); diff --git a/src/librustc_expand/mbe/macro_parser.rs b/src/librustc_expand/mbe/macro_parser.rs index c90a438c25ece..3c15a81c67f67 100644 --- a/src/librustc_expand/mbe/macro_parser.rs +++ b/src/librustc_expand/mbe/macro_parser.rs @@ -790,7 +790,7 @@ fn may_begin_with(token: &Token, name: Symbol) -> bool { }, sym::block => match token.kind { token::OpenDelim(token::Brace) => true, - token::Interpolated(ref nt, _) => match **nt { + token::Interpolated(ref nt) => match **nt { token::NtItem(_) | token::NtPat(_) | token::NtTy(_) @@ -804,7 +804,7 @@ fn may_begin_with(token: &Token, name: Symbol) -> bool { }, sym::path | sym::meta => match token.kind { token::ModSep | token::Ident(..) => true, - token::Interpolated(ref nt, _) => match **nt { + token::Interpolated(ref nt) => match **nt { token::NtPath(_) | token::NtMeta(_) => true, _ => may_be_ident(&nt), }, @@ -823,12 +823,12 @@ fn may_begin_with(token: &Token, name: Symbol) -> bool { token::ModSep | // path token::Lt | // path (UFCS constant) token::BinOp(token::Shl) => true, // path (double UFCS) - token::Interpolated(ref nt, _) => may_be_ident(nt), + token::Interpolated(ref nt) => may_be_ident(nt), _ => false, }, sym::lifetime => match token.kind { token::Lifetime(_) => true, - token::Interpolated(ref nt, _) => match **nt { + token::Interpolated(ref nt) => match **nt { token::NtLifetime(_) | token::NtTT(_) => true, _ => false, }, diff --git a/src/librustc_expand/mbe/macro_rules.rs b/src/librustc_expand/mbe/macro_rules.rs index 8cdb5b09c9e8b..7101525b30977 100644 --- a/src/librustc_expand/mbe/macro_rules.rs +++ b/src/librustc_expand/mbe/macro_rules.rs @@ -387,6 +387,7 @@ pub fn compile_declarative_macro( def: &ast::Item, edition: Edition, ) -> SyntaxExtension { + debug!("compile_declarative_macro: {:?}", def); let mk_syn_ext = |expander| { SyntaxExtension::new( sess, diff --git a/src/librustc_expand/mbe/quoted.rs b/src/librustc_expand/mbe/quoted.rs index de66c2ada40e6..09306f26ee0ad 100644 --- a/src/librustc_expand/mbe/quoted.rs +++ b/src/librustc_expand/mbe/quoted.rs @@ -90,7 +90,7 @@ pub(super) fn parse( /// # Parameters /// /// - `tree`: the tree we wish to convert. -/// - `trees`: an iterator over trees. We may need to read more tokens from it in order to finish +/// - `outer_trees`: an iterator over trees. We may need to read more tokens from it in order to finish /// converting `tree` /// - `expect_matchers`: same as for `parse` (see above). /// - `sess`: the parsing session. Any errors will be emitted to this session. @@ -98,7 +98,7 @@ pub(super) fn parse( /// unstable features or not. fn parse_tree( tree: tokenstream::TokenTree, - trees: &mut impl Iterator, + outer_trees: &mut impl Iterator, expect_matchers: bool, sess: &ParseSess, node_id: NodeId, @@ -106,56 +106,72 @@ fn parse_tree( // Depending on what `tree` is, we could be parsing different parts of a macro match tree { // `tree` is a `$` token. Look at the next token in `trees` - tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => match trees.next() { - // `tree` is followed by a delimited set of token trees. This indicates the beginning - // of a repetition sequence in the macro (e.g. `$(pat)*`). 
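For readers less familiar with `macro_rules!`: the "repetition sequence" the comment above refers to is the `$( ... )*` construct, for example:

```rust
// `$( $x:expr ),*` matches zero or more comma-separated expressions; the
// matching `$( + $x )*` in the body expands once per captured fragment.
macro_rules! sum {
    ( $( $x:expr ),* ) => {
        0 $( + $x )*
    };
}

fn main() {
    assert_eq!(sum!(1, 2, 3), 6);
    assert_eq!(sum!(), 0);
}
```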
- Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => { - // Must have `(` not `{` or `[` - if delim != token::Paren { - let tok = pprust::token_kind_to_string(&token::OpenDelim(delim)); - let msg = format!("expected `(`, found `{}`", tok); - sess.span_diagnostic.span_err(span.entire(), &msg); - } - // Parse the contents of the sequence itself - let sequence = parse(tts, expect_matchers, sess, node_id); - // Get the Kleene operator and optional separator - let (separator, kleene) = parse_sep_and_kleene_op(trees, span.entire(), sess); - // Count the number of captured "names" (i.e., named metavars) - let name_captures = macro_parser::count_names(&sequence); - TokenTree::Sequence( - span, - Lrc::new(SequenceRepetition { - tts: sequence, - separator, - kleene, - num_captures: name_captures, - }), - ) + tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => { + // FIXME: Handle `None`-delimited groups in a more systematic way + // during parsing. + let mut next = outer_trees.next(); + let mut trees: Box>; + if let Some(tokenstream::TokenTree::Delimited(_, token::NoDelim, tts)) = next { + trees = Box::new(tts.into_trees()); + next = trees.next(); + } else { + trees = Box::new(outer_trees); } - // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special - // metavariable that names the crate of the invocation. - Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => { - let (ident, is_raw) = token.ident().unwrap(); - let span = ident.span.with_lo(span.lo()); - if ident.name == kw::Crate && !is_raw { - TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span) - } else { - TokenTree::MetaVar(span, ident) + match next { + // `tree` is followed by a delimited set of token trees. This indicates the beginning + // of a repetition sequence in the macro (e.g. `$(pat)*`). + Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => { + // Must have `(` not `{` or `[` + if delim != token::Paren { + let tok = pprust::token_kind_to_string(&token::OpenDelim(delim)); + let msg = format!("expected `(`, found `{}`", tok); + sess.span_diagnostic.span_err(span.entire(), &msg); + } + // Parse the contents of the sequence itself + let sequence = parse(tts, expect_matchers, sess, node_id); + // Get the Kleene operator and optional separator + let (separator, kleene) = + parse_sep_and_kleene_op(&mut trees, span.entire(), sess); + // Count the number of captured "names" (i.e., named metavars) + let name_captures = macro_parser::count_names(&sequence); + TokenTree::Sequence( + span, + Lrc::new(SequenceRepetition { + tts: sequence, + separator, + kleene, + num_captures: name_captures, + }), + ) } - } - // `tree` is followed by a random token. This is an error. - Some(tokenstream::TokenTree::Token(token)) => { - let msg = - format!("expected identifier, found `{}`", pprust::token_to_string(&token),); - sess.span_diagnostic.span_err(token.span, &msg); - TokenTree::MetaVar(token.span, Ident::invalid()) - } + // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special + // metavariable that names the crate of the invocation. + Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => { + let (ident, is_raw) = token.ident().unwrap(); + let span = ident.span.with_lo(span.lo()); + if ident.name == kw::Crate && !is_raw { + TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span) + } else { + TokenTree::MetaVar(span, ident) + } + } - // There are no more tokens. Just return the `$` we already have. 
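In the rewritten `parse_tree` above, the two possible token sources (the contents of a `None`-delimited group, or the original outer iterator) are unified behind a boxed trait object so the rest of the match handles both identically. The same pattern in isolation (a toy example, not the compiler's types):

```rust
// Two branches produce differently-typed iterators; boxing them behind
// `dyn Iterator` gives the caller a single concrete type to work with.
fn evens_or_all(nums: Vec<i32>, only_even: bool) -> Box<dyn Iterator<Item = i32>> {
    if only_even {
        Box::new(nums.into_iter().filter(|n| n % 2 == 0))
    } else {
        Box::new(nums.into_iter())
    }
}

fn main() {
    let total: i32 = evens_or_all(vec![1, 2, 3, 4], true).sum();
    assert_eq!(total, 6);
}
```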
- None => TokenTree::token(token::Dollar, span), - }, + // `tree` is followed by a random token. This is an error. + Some(tokenstream::TokenTree::Token(token)) => { + let msg = format!( + "expected identifier, found `{}`", + pprust::token_to_string(&token), + ); + sess.span_diagnostic.span_err(token.span, &msg); + TokenTree::MetaVar(token.span, Ident::invalid()) + } + + // There are no more tokens. Just return the `$` we already have. + None => TokenTree::token(token::Dollar, span), + } + } // `tree` is an arbitrary token. Keep it. tokenstream::TokenTree::Token(token) => TokenTree::Token(token), diff --git a/src/librustc_expand/mbe/transcribe.rs b/src/librustc_expand/mbe/transcribe.rs index 486f0a6420d6e..e2d3d5c4d644e 100644 --- a/src/librustc_expand/mbe/transcribe.rs +++ b/src/librustc_expand/mbe/transcribe.rs @@ -4,7 +4,7 @@ use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch}; use rustc_ast::ast::MacCall; use rustc_ast::mut_visit::{self, MutVisitor}; -use rustc_ast::token::{self, FlattenGroup, NtTT, Token}; +use rustc_ast::token::{self, NtTT, Token}; use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint}; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sync::Lrc; @@ -240,10 +240,7 @@ pub(super) fn transcribe<'a>( result.push(tt.clone().into()); } else { marker.visit_span(&mut sp); - let token = TokenTree::token( - token::Interpolated(nt.clone(), FlattenGroup::No), - sp, - ); + let token = TokenTree::token(token::Interpolated(nt.clone()), sp); result.push(token.into()); } } else { diff --git a/src/librustc_expand/proc_macro.rs b/src/librustc_expand/proc_macro.rs index 1e26c832a2621..54012d62a72a7 100644 --- a/src/librustc_expand/proc_macro.rs +++ b/src/librustc_expand/proc_macro.rs @@ -2,10 +2,11 @@ use crate::base::{self, *}; use crate::proc_macro_server; use rustc_ast::ast::{self, ItemKind, MetaItemKind, NestedMetaItem}; -use rustc_ast::token::{self, FlattenGroup}; -use rustc_ast::tokenstream::{self, TokenStream}; +use rustc_ast::token; +use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_data_structures::sync::Lrc; use rustc_errors::{Applicability, ErrorReported}; +use rustc_parse::nt_to_tokenstream; use rustc_span::symbol::sym; use rustc_span::{Span, DUMMY_SP}; @@ -102,8 +103,12 @@ impl MultiItemModifier for ProcMacroDerive { } } - let token = token::Interpolated(Lrc::new(token::NtItem(item)), FlattenGroup::Yes); - let input = tokenstream::TokenTree::token(token, DUMMY_SP).into(); + let item = token::NtItem(item); + let input = if item.pretty_printing_compatibility_hack() { + TokenTree::token(token::Interpolated(Lrc::new(item)), DUMMY_SP).into() + } else { + nt_to_tokenstream(&item, ecx.parse_sess, DUMMY_SP) + }; let server = proc_macro_server::Rustc::new(ecx); let stream = match self.client.run(&EXEC_STRATEGY, server, input) { diff --git a/src/librustc_expand/proc_macro_server.rs b/src/librustc_expand/proc_macro_server.rs index c88b5a37f718a..663bc508143d6 100644 --- a/src/librustc_expand/proc_macro_server.rs +++ b/src/librustc_expand/proc_macro_server.rs @@ -1,7 +1,7 @@ use crate::base::ExtCtxt; use rustc_ast::ast; -use rustc_ast::token::{self, FlattenGroup}; +use rustc_ast::token; use rustc_ast::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint}; use rustc_ast::util::comments; use rustc_ast_pretty::pprust; @@ -60,12 +60,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec)> let Token { kind, span } = match tree { tokenstream::TokenTree::Delimited(span, delim, tts) => { let 
delimiter = Delimiter::from_internal(delim); - return TokenTree::Group(Group { - delimiter, - stream: tts, - span, - flatten: FlattenGroup::No, - }); + return TokenTree::Group(Group { delimiter, stream: tts, span, flatten: false }); } tokenstream::TokenTree::Token(token) => token, }; @@ -172,7 +167,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec)> delimiter: Delimiter::Bracket, stream, span: DelimSpan::from_single(span), - flatten: FlattenGroup::No, + flatten: false, })); if style == ast::AttrStyle::Inner { stack.push(tt!(Punct::new('!', false))); @@ -180,13 +175,13 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec)> tt!(Punct::new('#', false)) } - Interpolated(nt, flatten) => { + Interpolated(nt) => { let stream = nt_to_tokenstream(&nt, sess, span); TokenTree::Group(Group { delimiter: Delimiter::None, stream, span: DelimSpan::from_single(span), - flatten, + flatten: nt.pretty_printing_compatibility_hack(), }) } @@ -293,7 +288,7 @@ pub struct Group { /// A hack used to pass AST fragments to attribute and derive macros /// as a single nonterminal token instead of a token stream. /// FIXME: It needs to be removed, but there are some compatibility issues (see #73345). - flatten: FlattenGroup, + flatten: bool, } #[derive(Copy, Clone, PartialEq, Eq, Hash)] @@ -453,7 +448,7 @@ impl server::TokenStreamIter for Rustc<'_> { // Such token needs to be "unwrapped" and not represented as a delimited group. // FIXME: It needs to be removed, but there are some compatibility issues (see #73345). if let TokenTree::Group(ref group) = tree { - if matches!(group.flatten, FlattenGroup::Yes) { + if group.flatten { iter.cursor.append(group.stream.clone()); continue; } @@ -469,7 +464,7 @@ impl server::Group for Rustc<'_> { delimiter, stream, span: DelimSpan::from_single(server::Span::call_site(self)), - flatten: FlattenGroup::No, + flatten: false, } } fn delimiter(&mut self, group: &Self::Group) -> Delimiter { diff --git a/src/librustc_interface/tests.rs b/src/librustc_interface/tests.rs index d861b444c8816..e35dbbc0c2f24 100644 --- a/src/librustc_interface/tests.rs +++ b/src/librustc_interface/tests.rs @@ -500,7 +500,6 @@ fn test_debugging_options_tracking_hash() { untracked!(print_link_args, true); untracked!(print_llvm_passes, true); untracked!(print_mono_items, Some(String::from("abc"))); - untracked!(print_region_graph, true); untracked!(print_type_sizes, true); untracked!(query_dep_graph, true); untracked!(query_stats, true); diff --git a/src/librustc_lexer/src/lib.rs b/src/librustc_lexer/src/lib.rs index 77b3d26463dfe..2f4b1bbd3ba0f 100644 --- a/src/librustc_lexer/src/lib.rs +++ b/src/librustc_lexer/src/lib.rs @@ -51,8 +51,9 @@ pub enum TokenKind { // Multi-char tokens: /// "// comment" LineComment, - /// "/* block comment */" - /// Block comments can be recursive, so the sequence like "/* /* */" + /// `/* block comment */` + /// + /// Block comments can be recursive, so the sequence like `/* /* */` /// will not be considered terminated and will result in a parsing error. BlockComment { terminated: bool }, /// Any whitespace characters sequence. diff --git a/src/librustc_metadata/rmeta/decoder/cstore_impl.rs b/src/librustc_metadata/rmeta/decoder/cstore_impl.rs index 1b168bf01178c..abbe45fe02e25 100644 --- a/src/librustc_metadata/rmeta/decoder/cstore_impl.rs +++ b/src/librustc_metadata/rmeta/decoder/cstore_impl.rs @@ -239,6 +239,8 @@ provide! 
{ <'tcx> tcx, def_id, other, cdata, syms } + + crate_extern_paths => { cdata.source().paths().cloned().collect() } } pub fn provide(providers: &mut Providers<'_>) { diff --git a/src/librustc_middle/query/mod.rs b/src/librustc_middle/query/mod.rs index ba5a8c3ec2052..e7f9ad9d1cfd7 100644 --- a/src/librustc_middle/query/mod.rs +++ b/src/librustc_middle/query/mod.rs @@ -1042,6 +1042,10 @@ rustc_queries! { eval_always desc { "looking up the extra filename for a crate" } } + query crate_extern_paths(_: CrateNum) -> Vec { + eval_always + desc { "looking up the paths for extern crates" } + } } TypeChecking { diff --git a/src/librustc_middle/ty/context.rs b/src/librustc_middle/ty/context.rs index e2f601371b1ee..cee4efaa46518 100644 --- a/src/librustc_middle/ty/context.rs +++ b/src/librustc_middle/ty/context.rs @@ -1049,6 +1049,7 @@ impl<'tcx> TyCtxt<'tcx> { Some(attr) => attr, None => return Bound::Unbounded, }; + debug!("layout_scalar_valid_range: attr={:?}", attr); for meta in attr.meta_item_list().expect("rustc_layout_scalar_valid_range takes args") { match meta.literal().expect("attribute takes lit").kind { ast::LitKind::Int(a, _) => return Bound::Included(a), @@ -1169,7 +1170,7 @@ impl<'tcx> TyCtxt<'tcx> { self.ty_error_with_message(DUMMY_SP, "TyKind::Error constructed but no error reported") } - /// Constructs a `TyKind::Error` type and registers a `delay_span_bug` with the given `msg to + /// Constructs a `TyKind::Error` type and registers a `delay_span_bug` with the given `msg` to /// ensure it gets used. #[track_caller] pub fn ty_error_with_message>(self, span: S, msg: &str) -> Ty<'tcx> { diff --git a/src/librustc_middle/ty/query/mod.rs b/src/librustc_middle/ty/query/mod.rs index 35d19b7603faf..2ad49b1acce43 100644 --- a/src/librustc_middle/ty/query/mod.rs +++ b/src/librustc_middle/ty/query/mod.rs @@ -57,6 +57,7 @@ use rustc_span::{Span, DUMMY_SP}; use std::borrow::Cow; use std::collections::BTreeMap; use std::ops::Deref; +use std::path::PathBuf; use std::sync::Arc; #[macro_use] diff --git a/src/librustc_parse/parser/attr.rs b/src/librustc_parse/parser/attr.rs index b8cb146145b98..803f14a2a228a 100644 --- a/src/librustc_parse/parser/attr.rs +++ b/src/librustc_parse/parser/attr.rs @@ -155,7 +155,7 @@ impl<'a> Parser<'a> { /// The delimiters or `=` are still put into the resulting token stream. pub fn parse_attr_item(&mut self) -> PResult<'a, ast::AttrItem> { let item = match self.token.kind { - token::Interpolated(ref nt, _) => match **nt { + token::Interpolated(ref nt) => match **nt { Nonterminal::NtMeta(ref item) => Some(item.clone().into_inner()), _ => None, }, @@ -254,7 +254,7 @@ impl<'a> Parser<'a> { /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? 
; pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> { let nt_meta = match self.token.kind { - token::Interpolated(ref nt, _) => match **nt { + token::Interpolated(ref nt) => match **nt { token::NtMeta(ref e) => Some(e.clone()), _ => None, }, diff --git a/src/librustc_parse/parser/diagnostics.rs b/src/librustc_parse/parser/diagnostics.rs index e27bbc532cfc4..16a118cb48c91 100644 --- a/src/librustc_parse/parser/diagnostics.rs +++ b/src/librustc_parse/parser/diagnostics.rs @@ -376,7 +376,14 @@ impl<'a> Parser<'a> { /// let _ = vec![1, 2, 3].into_iter().collect::>>>(); /// ^^ help: remove extra angle brackets /// ``` - pub(super) fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) { + /// + /// If `true` is returned, then trailing brackets were recovered, tokens were consumed + /// up until one of the tokens in 'end' was encountered, and an error was emitted. + pub(super) fn check_trailing_angle_brackets( + &mut self, + segment: &PathSegment, + end: &[&TokenKind], + ) -> bool { // This function is intended to be invoked after parsing a path segment where there are two // cases: // @@ -409,7 +416,7 @@ impl<'a> Parser<'a> { parsed_angle_bracket_args, ); if !parsed_angle_bracket_args { - return; + return false; } // Keep the span at the start so we can highlight the sequence of `>` characters to be @@ -447,18 +454,18 @@ impl<'a> Parser<'a> { number_of_gt, number_of_shr, ); if number_of_gt < 1 && number_of_shr < 1 { - return; + return false; } // Finally, double check that we have our end token as otherwise this is the // second case. if self.look_ahead(position, |t| { trace!("check_trailing_angle_brackets: t={:?}", t); - *t == end + end.contains(&&t.kind) }) { // Eat from where we started until the end token so that parsing can continue // as if we didn't have those extra angle brackets. - self.eat_to_tokens(&[&end]); + self.eat_to_tokens(end); let span = lo.until(self.token.span); let total_num_of_gt = number_of_gt + number_of_shr * 2; @@ -473,7 +480,9 @@ impl<'a> Parser<'a> { Applicability::MachineApplicable, ) .emit(); + return true; } + false } /// Check to see if a pair of chained operators looks like an attempt at chained comparison, diff --git a/src/librustc_parse/parser/expr.rs b/src/librustc_parse/parser/expr.rs index 2745b18a8cd51..abb444933536f 100644 --- a/src/librustc_parse/parser/expr.rs +++ b/src/librustc_parse/parser/expr.rs @@ -26,7 +26,7 @@ use std::mem; /// `token::Interpolated` tokens. macro_rules! 
maybe_whole_expr { ($p:expr) => { - if let token::Interpolated(nt, _) = &$p.token.kind { + if let token::Interpolated(nt) = &$p.token.kind { match &**nt { token::NtExpr(e) | token::NtLiteral(e) => { let e = e.clone(); @@ -867,7 +867,7 @@ impl<'a> Parser<'a> { let fn_span_lo = self.token.span; let segment = self.parse_path_segment(PathStyle::Expr)?; - self.check_trailing_angle_brackets(&segment, token::OpenDelim(token::Paren)); + self.check_trailing_angle_brackets(&segment, &[&token::OpenDelim(token::Paren)]); if self.check(&token::OpenDelim(token::Paren)) { // Method call `expr.f()` diff --git a/src/librustc_parse/parser/item.rs b/src/librustc_parse/parser/item.rs index 10df16964da08..5923a185dcf93 100644 --- a/src/librustc_parse/parser/item.rs +++ b/src/librustc_parse/parser/item.rs @@ -9,7 +9,7 @@ use rustc_ast::ast::{AssocItem, AssocItemKind, ForeignItemKind, Item, ItemKind, use rustc_ast::ast::{Async, Const, Defaultness, IsAuto, Mutability, Unsafe, UseTree, UseTreeKind}; use rustc_ast::ast::{BindingMode, Block, FnDecl, FnSig, Param, SelfKind}; use rustc_ast::ast::{EnumDef, Generics, StructField, TraitRef, Ty, TyKind, Variant, VariantData}; -use rustc_ast::ast::{FnHeader, ForeignItem, PathSegment, Visibility, VisibilityKind}; +use rustc_ast::ast::{FnHeader, ForeignItem, Path, PathSegment, Visibility, VisibilityKind}; use rustc_ast::ast::{MacArgs, MacCall, MacDelimiter}; use rustc_ast::ptr::P; use rustc_ast::token::{self, TokenKind}; @@ -1262,6 +1262,25 @@ impl<'a> Parser<'a> { sp, &format!("expected `,`, or `}}`, found {}", super::token_descr(&self.token)), ); + + // Try to recover extra trailing angle brackets + let mut recovered = false; + if let TyKind::Path(_, Path { segments, .. }) = &a_var.ty.kind { + if let Some(last_segment) = segments.last() { + recovered = self.check_trailing_angle_brackets( + last_segment, + &[&token::Comma, &token::CloseDelim(token::Brace)], + ); + if recovered { + // Handle a case like `Vec>,` where we can continue parsing fields + // after the comma + self.eat(&token::Comma); + // `check_trailing_angle_brackets` already emitted a nicer error + err.cancel(); + } + } + } + if self.token.is_ident() { // This is likely another field; emit the diagnostic and keep going err.span_suggestion( @@ -1271,6 +1290,14 @@ impl<'a> Parser<'a> { Applicability::MachineApplicable, ); err.emit(); + recovered = true; + } + + if recovered { + // Make sure an error was emitted (either by recovering an angle bracket, + // or by finding an identifier as the next token), since we're + // going to continue parsing + assert!(self.sess.span_diagnostic.has_errors()); } else { return Err(err); } @@ -1780,7 +1807,7 @@ impl<'a> Parser<'a> { fn is_named_param(&self) -> bool { let offset = match self.token.kind { - token::Interpolated(ref nt, _) => match **nt { + token::Interpolated(ref nt) => match **nt { token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon), _ => 0, }, diff --git a/src/librustc_parse/parser/mod.rs b/src/librustc_parse/parser/mod.rs index 04074479a21a4..7811d5fb741b2 100644 --- a/src/librustc_parse/parser/mod.rs +++ b/src/librustc_parse/parser/mod.rs @@ -54,7 +54,7 @@ enum BlockMode { #[macro_export] macro_rules! maybe_whole { ($p:expr, $constructor:ident, |$x:ident| $e:expr) => { - if let token::Interpolated(nt, _) = &$p.token.kind { + if let token::Interpolated(nt) = &$p.token.kind { if let token::$constructor(x) = &**nt { let $x = x.clone(); $p.bump(); @@ -69,7 +69,7 @@ macro_rules! maybe_whole { macro_rules! 
maybe_recover_from_interpolated_ty_qpath { ($self: expr, $allow_qpath_recovery: expr) => { if $allow_qpath_recovery && $self.look_ahead(1, |t| t == &token::ModSep) { - if let token::Interpolated(nt, _) = &$self.token.kind { + if let token::Interpolated(nt) = &$self.token.kind { if let token::NtTy(ty) = &**nt { let ty = ty.clone(); $self.bump(); @@ -922,7 +922,7 @@ impl<'a> Parser<'a> { if self.eat(&token::Eq) { let eq_span = self.prev_token.span; let mut is_interpolated_expr = false; - if let token::Interpolated(nt, _) = &self.token.kind { + if let token::Interpolated(nt) = &self.token.kind { if let token::NtExpr(..) = **nt { is_interpolated_expr = true; } diff --git a/src/librustc_parse/parser/pat.rs b/src/librustc_parse/parser/pat.rs index 742183d369735..6603d0afc0248 100644 --- a/src/librustc_parse/parser/pat.rs +++ b/src/librustc_parse/parser/pat.rs @@ -515,7 +515,7 @@ impl<'a> Parser<'a> { self.recover_additional_muts(); // Make sure we don't allow e.g. `let mut $p;` where `$p:pat`. - if let token::Interpolated(ref nt, _) = self.token.kind { + if let token::Interpolated(ref nt) = self.token.kind { if let token::NtPat(_) = **nt { self.expected_ident_found().emit(); } diff --git a/src/librustc_parse/parser/path.rs b/src/librustc_parse/parser/path.rs index 5210614548da3..67e9b3af4a8cf 100644 --- a/src/librustc_parse/parser/path.rs +++ b/src/librustc_parse/parser/path.rs @@ -169,7 +169,7 @@ impl<'a> Parser<'a> { // `PathStyle::Expr` is only provided at the root invocation and never in // `parse_path_segment` to recurse and therefore can be checked to maintain // this invariant. - self.check_trailing_angle_brackets(&segment, token::ModSep); + self.check_trailing_angle_brackets(&segment, &[&token::ModSep]); } segments.push(segment); diff --git a/src/librustc_passes/lang_items.rs b/src/librustc_passes/lang_items.rs index 0be37cb096038..0326591a931f5 100644 --- a/src/librustc_passes/lang_items.rs +++ b/src/librustc_passes/lang_items.rs @@ -146,6 +146,28 @@ impl LanguageItemCollector<'tcx> { )); } } + let mut note_def = |which, def_id: DefId| { + let crate_name = self.tcx.crate_name(def_id.krate); + let note = if def_id.is_local() { + format!("{} definition in the local crate (`{}`)", which, crate_name) + } else { + let paths: Vec<_> = self + .tcx + .crate_extern_paths(def_id.krate) + .iter() + .map(|p| p.display().to_string()) + .collect(); + format!( + "{} definition in `{}` loaded from {}", + which, + crate_name, + paths.join(", ") + ) + }; + err.note(¬e); + }; + note_def("first", original_def_id); + note_def("second", item_def_id); } err.emit(); } diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index a92d451dfd006..bca65c63e9198 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -1325,7 +1325,7 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> { } fn visit_token(&mut self, t: Token) { - if let token::Interpolated(nt, _) = t.kind { + if let token::Interpolated(nt) = t.kind { if let token::NtExpr(ref expr) = *nt { if let ast::ExprKind::MacCall(..) 
= expr.kind { self.visit_invoc(expr.id); diff --git a/src/librustc_resolve/def_collector.rs b/src/librustc_resolve/def_collector.rs index c25a20210e026..32af920020ce6 100644 --- a/src/librustc_resolve/def_collector.rs +++ b/src/librustc_resolve/def_collector.rs @@ -256,7 +256,7 @@ impl<'a, 'b> visit::Visitor<'a> for DefCollector<'a, 'b> { } fn visit_token(&mut self, t: Token) { - if let token::Interpolated(nt, _) = t.kind { + if let token::Interpolated(nt) = t.kind { if let token::NtExpr(ref expr) = *nt { if let ExprKind::MacCall(..) = expr.kind { self.visit_macro_invoc(expr.id); diff --git a/src/librustc_session/options.rs b/src/librustc_session/options.rs index 9337f241d7022..ed5fd6dc7028b 100644 --- a/src/librustc_session/options.rs +++ b/src/librustc_session/options.rs @@ -958,9 +958,6 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options, "print the LLVM optimization passes being run (default: no)"), print_mono_items: Option = (None, parse_opt_string, [UNTRACKED], "print the result of the monomorphization collection pass"), - print_region_graph: bool = (false, parse_bool, [UNTRACKED], - "prints region inference graph. \ - Use with RUST_REGION_GRAPH=help for more info (default: no)"), print_type_sizes: bool = (false, parse_bool, [UNTRACKED], "print layout information for each type encountered (default: no)"), profile: bool = (false, parse_bool, [TRACKED], diff --git a/src/librustc_span/symbol.rs b/src/librustc_span/symbol.rs index e2f0d0b94c46c..33f14e50edbf3 100644 --- a/src/librustc_span/symbol.rs +++ b/src/librustc_span/symbol.rs @@ -401,6 +401,7 @@ symbols! { infer_outlives_requirements, infer_static_outlives_requirements, inline, + Input, intel, into_iter, IntoIterator, @@ -589,6 +590,8 @@ symbols! { proc_macro_mod, proc_macro_non_items, proc_macro_path_invoc, + ProceduralMasqueradeDummyType, + ProcMacroHack, profiler_builtins, profiler_runtime, ptr_guaranteed_eq, diff --git a/src/librustc_trait_selection/traits/error_reporting/mod.rs b/src/librustc_trait_selection/traits/error_reporting/mod.rs index 49e43873df759..ad6e81ed3e889 100644 --- a/src/librustc_trait_selection/traits/error_reporting/mod.rs +++ b/src/librustc_trait_selection/traits/error_reporting/mod.rs @@ -429,6 +429,24 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> { ); } + let is_fn_trait = [ + self.tcx.lang_items().fn_trait(), + self.tcx.lang_items().fn_mut_trait(), + self.tcx.lang_items().fn_once_trait(), + ] + .contains(&Some(trait_ref.def_id())); + let is_target_feature_fn = + if let ty::FnDef(def_id, _) = trait_ref.skip_binder().self_ty().kind { + !self.tcx.codegen_fn_attrs(def_id).target_features.is_empty() + } else { + false + }; + if is_fn_trait && is_target_feature_fn { + err.note( + "`#[target_feature]` functions do not implement the `Fn` traits", + ); + } + // Try to report a help message if !trait_ref.has_infer_types_or_consts() && self.predicate_can_apply(obligation.param_env, trait_ref) diff --git a/src/librustc_trait_selection/traits/select/candidate_assembly.rs b/src/librustc_trait_selection/traits/select/candidate_assembly.rs index 91c162872b215..597a7a58022cd 100644 --- a/src/librustc_trait_selection/traits/select/candidate_assembly.rs +++ b/src/librustc_trait_selection/traits/select/candidate_assembly.rs @@ -306,7 +306,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { candidates.ambiguous = true; // Could wind up being a fn() type. } // Provide an impl, but only for suitable `fn` pointers. - ty::FnDef(..) 
| ty::FnPtr(_) => { + ty::FnPtr(_) => { if let ty::FnSig { unsafety: hir::Unsafety::Normal, abi: Abi::Rust, @@ -317,6 +317,20 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { candidates.vec.push(FnPointerCandidate); } } + // Provide an impl for suitable functions, rejecting `#[target_feature]` functions (RFC 2396). + ty::FnDef(def_id, _) => { + if let ty::FnSig { + unsafety: hir::Unsafety::Normal, + abi: Abi::Rust, + c_variadic: false, + .. + } = self_ty.fn_sig(self.tcx()).skip_binder() + { + if self.tcx().codegen_fn_attrs(def_id).target_features.is_empty() { + candidates.vec.push(FnPointerCandidate); + } + } + } _ => {} } diff --git a/src/libstd/fs.rs b/src/libstd/fs.rs index 17f890375f843..4d031cb7a52e4 100644 --- a/src/libstd/fs.rs +++ b/src/libstd/fs.rs @@ -8,6 +8,7 @@ //! extension traits of `std::os::$platform`. #![stable(feature = "rust1", since = "1.0.0")] +#![deny(unsafe_op_in_unsafe_fn)] use crate::ffi::OsString; use crate::fmt; @@ -666,7 +667,8 @@ impl Read for File { #[inline] unsafe fn initializer(&self) -> Initializer { - Initializer::nop() + // SAFETY: Read is guaranteed to work on uninitialized memory + unsafe { Initializer::nop() } } } #[stable(feature = "rust1", since = "1.0.0")] @@ -711,7 +713,8 @@ impl Read for &File { #[inline] unsafe fn initializer(&self) -> Initializer { - Initializer::nop() + // SAFETY: Read is guaranteed to work on uninitialized memory + unsafe { Initializer::nop() } } } #[stable(feature = "rust1", since = "1.0.0")] diff --git a/src/libstd/lib.rs b/src/libstd/lib.rs index 372038df54f2e..bd585d39c242f 100644 --- a/src/libstd/lib.rs +++ b/src/libstd/lib.rs @@ -319,6 +319,7 @@ #![cfg_attr(bootstrap, feature(track_caller))] #![feature(try_reserve)] #![feature(unboxed_closures)] +#![feature(unsafe_block_in_unsafe_fn)] #![feature(untagged_unions)] #![feature(unwind_attributes)] #![feature(vec_into_raw_parts)] diff --git a/src/test/run-make/rustc-macro-dep-files/foo.rs b/src/test/run-make/rustc-macro-dep-files/foo.rs index 00b1c26d43f10..66db1a2173665 100644 --- a/src/test/run-make/rustc-macro-dep-files/foo.rs +++ b/src/test/run-make/rustc-macro-dep-files/foo.rs @@ -7,6 +7,6 @@ use proc_macro::TokenStream; #[proc_macro_derive(A)] pub fn derive(input: TokenStream) -> TokenStream { let input = input.to_string(); - assert!(input.contains("struct A;")); + assert!(input.contains("struct A ;")); "struct B;".parse().unwrap() } diff --git a/src/test/ui/async-await/issues/issue-60674.stdout b/src/test/ui/async-await/issues/issue-60674.stdout index 86c3591b3afc0..395d9e21b3848 100644 --- a/src/test/ui/async-await/issues/issue-60674.stdout +++ b/src/test/ui/async-await/issues/issue-60674.stdout @@ -1,3 +1,3 @@ -async fn f(mut x: u8) { } -async fn g((mut x, y, mut z): (u8, u8, u8)) { } -async fn g(mut x: u8, (a, mut b, c): (u8, u8, u8), y: u8) { } +async fn f(mut x : u8) { } +async fn g((mut x, y, mut z) : (u8, u8, u8)) { } +async fn g(mut x : u8, (a, mut b, c) : (u8, u8, u8), y : u8) { } diff --git a/src/test/ui/const-generics/lazy-normalization/issue-71922.rs b/src/test/ui/const-generics/lazy-normalization/issue-71922.rs deleted file mode 100644 index 0d392ddcaedcc..0000000000000 --- a/src/test/ui/const-generics/lazy-normalization/issue-71922.rs +++ /dev/null @@ -1,19 +0,0 @@ -#![feature(const_generics)] -//~^ WARN the feature `const_generics` is incomplete -trait Foo {} - -impl Foo for [(); N] where Self: FooImpl<{ N == 0 }> {} -//~^ ERROR constant expression depends on a generic parameter - -trait FooImpl {} - -impl FooImpl<{ 0u8 == 0u8 }> for [(); 0] {} - -impl 
FooImpl<{ 0u8 != 0u8 }> for [(); N] {} - -fn foo(_: T) {} - -fn main() { - foo([]); - foo([()]); -} diff --git a/src/test/ui/const-generics/lazy-normalization/issue-71922.stderr b/src/test/ui/const-generics/lazy-normalization/issue-71922.stderr deleted file mode 100644 index 00917571e716d..0000000000000 --- a/src/test/ui/const-generics/lazy-normalization/issue-71922.stderr +++ /dev/null @@ -1,19 +0,0 @@ -warning: the feature `const_generics` is incomplete and may not be safe to use and/or cause compiler crashes - --> $DIR/issue-71922.rs:1:12 - | -LL | #![feature(const_generics)] - | ^^^^^^^^^^^^^^ - | - = note: `#[warn(incomplete_features)]` on by default - = note: see issue #44580 for more information - -error: constant expression depends on a generic parameter - --> $DIR/issue-71922.rs:5:50 - | -LL | impl Foo for [(); N] where Self: FooImpl<{ N == 0 }> {} - | ^^^^^^^^^^^^^^^^^^^ - | - = note: this may fail depending on what value the parameter takes - -error: aborting due to previous error; 1 warning emitted - diff --git a/src/test/ui/duplicate_entry_error.rs b/src/test/ui/duplicate_entry_error.rs index b8d98a8999b9d..776ecedea7e7e 100644 --- a/src/test/ui/duplicate_entry_error.rs +++ b/src/test/ui/duplicate_entry_error.rs @@ -1,3 +1,4 @@ +// normalize-stderr-test "loaded from .*libstd-.*.rlib" -> "loaded from SYSROOT/libstd-*.rlib" // note-pattern: first defined in crate `std`. // Test for issue #31788 and E0152 diff --git a/src/test/ui/duplicate_entry_error.stderr b/src/test/ui/duplicate_entry_error.stderr index 2d52ea3f6c20e..61cccf40ed8a5 100644 --- a/src/test/ui/duplicate_entry_error.stderr +++ b/src/test/ui/duplicate_entry_error.stderr @@ -1,5 +1,5 @@ error[E0152]: found duplicate lang item `panic_impl` - --> $DIR/duplicate_entry_error.rs:10:1 + --> $DIR/duplicate_entry_error.rs:11:1 | LL | / fn panic_impl(info: &PanicInfo) -> ! 
{ LL | | @@ -8,6 +8,8 @@ LL | | } | |_^ | = note: the lang item is first defined in crate `std` (which `duplicate_entry_error` depends on) + = note: first definition in `std` loaded from SYSROOT/libstd-*.rlib + = note: second definition in the local crate (`duplicate_entry_error`) error: aborting due to previous error diff --git a/src/test/ui/error-codes/E0152.rs b/src/test/ui/error-codes/E0152.rs index 94467b9bddeb0..d716ca1a14fdf 100644 --- a/src/test/ui/error-codes/E0152.rs +++ b/src/test/ui/error-codes/E0152.rs @@ -1,3 +1,4 @@ +// normalize-stderr-test "loaded from .*liballoc-.*.rlib" -> "loaded from SYSROOT/liballoc-*.rlib" #![feature(lang_items)] #[lang = "owned_box"] diff --git a/src/test/ui/error-codes/E0152.stderr b/src/test/ui/error-codes/E0152.stderr index fbaa276ce1093..7445c2880af1c 100644 --- a/src/test/ui/error-codes/E0152.stderr +++ b/src/test/ui/error-codes/E0152.stderr @@ -1,10 +1,12 @@ error[E0152]: found duplicate lang item `owned_box` - --> $DIR/E0152.rs:4:1 + --> $DIR/E0152.rs:5:1 | LL | struct Foo; | ^^^^^^^^^^^ | = note: the lang item is first defined in crate `alloc` (which `std` depends on) + = note: first definition in `alloc` loaded from SYSROOT/liballoc-*.rlib + = note: second definition in the local crate (`E0152`) error: aborting due to previous error diff --git a/src/test/ui/lint/lint-nonstandard-style-unicode-1.rs b/src/test/ui/lint/lint-nonstandard-style-unicode-1.rs new file mode 100644 index 0000000000000..4f90bd98c63e5 --- /dev/null +++ b/src/test/ui/lint/lint-nonstandard-style-unicode-1.rs @@ -0,0 +1,50 @@ +#![allow(dead_code)] + +#![forbid(non_camel_case_types)] +#![feature(non_ascii_idents)] + +// Some scripts (e.g., hiragana) don't have a concept of +// upper/lowercase + +// 1. non_camel_case_types + +// Can start with a non-lowercase letter +struct Θχ; +struct ヒa; + +struct χa; +//~^ ERROR type `χa` should have an upper camel case name + +// If there are already leading or trailing underscores, they get trimmed before checking. +// This is fine: +struct _ヒb; + +// This is not: +struct __χa; +//~^ ERROR type `__χa` should have an upper camel case name + +// Besides this, we cannot have two continuous underscores in the middle. + +struct 对__否; +//~^ ERROR type `对__否` should have an upper camel case name + +struct ヒ__χ; +//~^ ERROR type `ヒ__χ` should have an upper camel case name + +// also cannot have a lowercase letter next to an underscore.
+// so this triggers the lint: + +struct Hello_你好; +//~^ ERROR type `Hello_你好` should have an upper camel case name + +struct Hello_World; +//~^ ERROR type `Hello_World` should have an upper camel case name + +struct 你_ӟ; +//~^ ERROR type `你_ӟ` should have an upper camel case name + +// and this is ok: + +struct 你_好; + +fn main() {} diff --git a/src/test/ui/lint/lint-nonstandard-style-unicode-1.stderr b/src/test/ui/lint/lint-nonstandard-style-unicode-1.stderr new file mode 100644 index 0000000000000..371002656591c --- /dev/null +++ b/src/test/ui/lint/lint-nonstandard-style-unicode-1.stderr @@ -0,0 +1,50 @@ +error: type `χa` should have an upper camel case name + --> $DIR/lint-nonstandard-style-unicode-1.rs:15:8 + | +LL | struct χa; + | ^^ help: convert the identifier to upper camel case: `Χa` + | +note: the lint level is defined here + --> $DIR/lint-nonstandard-style-unicode-1.rs:3:11 + | +LL | #![forbid(non_camel_case_types)] + | ^^^^^^^^^^^^^^^^^^^^ + +error: type `__χa` should have an upper camel case name + --> $DIR/lint-nonstandard-style-unicode-1.rs:23:8 + | +LL | struct __χa; + | ^^^^ help: convert the identifier to upper camel case: `Χa` + +error: type `对__否` should have an upper camel case name + --> $DIR/lint-nonstandard-style-unicode-1.rs:28:8 + | +LL | struct 对__否; + | ^^^^^^ help: convert the identifier to upper camel case: `对_否` + +error: type `ヒ__χ` should have an upper camel case name + --> $DIR/lint-nonstandard-style-unicode-1.rs:31:8 + | +LL | struct ヒ__χ; + | ^^^^^ help: convert the identifier to upper camel case: `ヒΧ` + +error: type `Hello_你好` should have an upper camel case name + --> $DIR/lint-nonstandard-style-unicode-1.rs:37:8 + | +LL | struct Hello_你好; + | ^^^^^^^^^^ help: convert the identifier to upper camel case: `Hello你好` + +error: type `Hello_World` should have an upper camel case name + --> $DIR/lint-nonstandard-style-unicode-1.rs:40:8 + | +LL | struct Hello_World; + | ^^^^^^^^^^^ help: convert the identifier to upper camel case: `HelloWorld` + +error: type `你_ӟ` should have an upper camel case name + --> $DIR/lint-nonstandard-style-unicode-1.rs:43:8 + | +LL | struct 你_ӟ; + | ^^^^ help: convert the identifier to upper camel case: `你Ӟ` + +error: aborting due to 7 previous errors + diff --git a/src/test/ui/lint/lint-nonstandard-style-unicode-2.rs b/src/test/ui/lint/lint-nonstandard-style-unicode-2.rs new file mode 100644 index 0000000000000..813e0ea5c5708 --- /dev/null +++ b/src/test/ui/lint/lint-nonstandard-style-unicode-2.rs @@ -0,0 +1,30 @@ +#![allow(dead_code)] + +#![forbid(non_snake_case)] +#![feature(non_ascii_idents)] + +// Some scripts (e.g., hiragana) don't have a concept of +// upper/lowercase + +// 2. non_snake_case + +// Can only use non-uppercase letters. +// So this works: + +fn 编程() {} + +// but this doesn't: + +fn Ц() {} +//~^ ERROR function `Ц` should have a snake case name + +// besides this, you cannot use continuous underscores in the middle + +fn 分__隔() {} +//~^ ERROR function `分__隔` should have a snake case name + +// but you can use them both at the beginning and at the end.
+ +fn _______不_连_续_的_存_在_______() {} + +fn main() {} diff --git a/src/test/ui/lint/lint-nonstandard-style-unicode-2.stderr b/src/test/ui/lint/lint-nonstandard-style-unicode-2.stderr new file mode 100644 index 0000000000000..0b309e315a411 --- /dev/null +++ b/src/test/ui/lint/lint-nonstandard-style-unicode-2.stderr @@ -0,0 +1,20 @@ +error: function `Ц` should have a snake case name + --> $DIR/lint-nonstandard-style-unicode-2.rs:18:4 + | +LL | fn Ц() {} + | ^ help: convert the identifier to snake case: `ц` + | +note: the lint level is defined here + --> $DIR/lint-nonstandard-style-unicode-2.rs:3:11 + | +LL | #![forbid(non_snake_case)] + | ^^^^^^^^^^^^^^ + +error: function `分__隔` should have a snake case name + --> $DIR/lint-nonstandard-style-unicode-2.rs:23:4 + | +LL | fn 分__隔() {} + | ^^^^^^ help: convert the identifier to snake case: `分_隔` + +error: aborting due to 2 previous errors + diff --git a/src/test/ui/lint/lint-nonstandard-style-unicode-3.rs b/src/test/ui/lint/lint-nonstandard-style-unicode-3.rs new file mode 100644 index 0000000000000..b17c2de39a0c0 --- /dev/null +++ b/src/test/ui/lint/lint-nonstandard-style-unicode-3.rs @@ -0,0 +1,25 @@ +#![allow(dead_code)] + +#![forbid(non_upper_case_globals)] +#![feature(non_ascii_idents)] + +// Some scripts (e.g., hiragana) don't have a concept of +// upper/lowercase + +// 3. non_upper_case_globals + +// Can only use non-lowercase letters. +// So this works: + +static ラ: usize = 0; + +// but this doesn't: + +static τεχ: f32 = 3.14159265; +//~^ ERROR static variable `τεχ` should have an upper case name + +// This has no limit at all on underscore usages. + +static __密__封__线__内__禁__止__答__题__: bool = true; + +fn main() {} diff --git a/src/test/ui/lint/lint-nonstandard-style-unicode-3.stderr b/src/test/ui/lint/lint-nonstandard-style-unicode-3.stderr new file mode 100644 index 0000000000000..44bd5ad55ff5c --- /dev/null +++ b/src/test/ui/lint/lint-nonstandard-style-unicode-3.stderr @@ -0,0 +1,14 @@ +error: static variable `τεχ` should have an upper case name + --> $DIR/lint-nonstandard-style-unicode-3.rs:18:8 + | +LL | static τεχ: f32 = 3.14159265; + | ^^^ help: convert the identifier to upper case: `ΤΕΧ` + | +note: the lint level is defined here + --> $DIR/lint-nonstandard-style-unicode-3.rs:3:11 + | +LL | #![forbid(non_upper_case_globals)] + | ^^^^^^^^^^^^^^^^^^^^^^ + +error: aborting due to previous error + diff --git a/src/test/ui/lint/lint-nonstandard-style-unicode.rs b/src/test/ui/lint/lint-nonstandard-style-unicode.rs deleted file mode 100644 index 9f16cb20fb32c..0000000000000 --- a/src/test/ui/lint/lint-nonstandard-style-unicode.rs +++ /dev/null @@ -1,16 +0,0 @@ -// check-pass - -#![allow(dead_code)] - -#![forbid(non_camel_case_types)] -#![forbid(non_upper_case_globals)] -#![feature(non_ascii_idents)] - -// Some scripts (e.g., hiragana) don't have a concept of -// upper/lowercase - -struct ヒ; - -static ラ: usize = 0; - -pub fn main() {} diff --git a/src/test/ui/macros/doc-comment.rs b/src/test/ui/macros/doc-comment.rs new file mode 100644 index 0000000000000..9de39e9b56c98 --- /dev/null +++ b/src/test/ui/macros/doc-comment.rs @@ -0,0 +1,25 @@ +// check-pass +// Tests that we properly handle a nested macro expansion +// involving a `#[doc]` attribute +#![deny(missing_docs)] +//! Crate docs + +macro_rules! doc_comment { + ($x:expr, $($tt:tt)*) => { + #[doc = $x] + $($tt)* + } +} + +macro_rules! 
make_comment { + () => { + doc_comment!("Function docs", + pub fn bar() {} + ); + } +} + + +make_comment!(); + +fn main() {} diff --git a/src/test/ui/panic-handler/panic-handler-std.rs b/src/test/ui/panic-handler/panic-handler-std.rs index 0acc2722cb21f..6183c886cfac7 100644 --- a/src/test/ui/panic-handler/panic-handler-std.rs +++ b/src/test/ui/panic-handler/panic-handler-std.rs @@ -1,3 +1,4 @@ +// normalize-stderr-test "loaded from .*libstd-.*.rlib" -> "loaded from SYSROOT/libstd-*.rlib" // error-pattern: found duplicate lang item `panic_impl` diff --git a/src/test/ui/panic-handler/panic-handler-std.stderr b/src/test/ui/panic-handler/panic-handler-std.stderr index f71c28e5aa641..bb656089bcaff 100644 --- a/src/test/ui/panic-handler/panic-handler-std.stderr +++ b/src/test/ui/panic-handler/panic-handler-std.stderr @@ -1,5 +1,5 @@ error[E0152]: found duplicate lang item `panic_impl` - --> $DIR/panic-handler-std.rs:7:1 + --> $DIR/panic-handler-std.rs:8:1 | LL | / fn panic(info: PanicInfo) -> ! { LL | | loop {} @@ -7,9 +7,11 @@ LL | | } | |_^ | = note: the lang item is first defined in crate `std` (which `panic_handler_std` depends on) + = note: first definition in `std` loaded from SYSROOT/libstd-*.rlib + = note: second definition in the local crate (`panic_handler_std`) error: argument should be `&PanicInfo` - --> $DIR/panic-handler-std.rs:7:16 + --> $DIR/panic-handler-std.rs:8:16 | LL | fn panic(info: PanicInfo) -> ! { | ^^^^^^^^^ diff --git a/src/test/ui/parser/recover-field-extra-angle-brackets.rs b/src/test/ui/parser/recover-field-extra-angle-brackets.rs new file mode 100644 index 0000000000000..5e0e00bcb5e8d --- /dev/null +++ b/src/test/ui/parser/recover-field-extra-angle-brackets.rs @@ -0,0 +1,14 @@ +// Tests that we recover from extra trailing angle brackets +// in a struct field + +struct BadStruct { + first: Vec>, //~ ERROR unmatched angle bracket + second: bool +} + +fn bar(val: BadStruct) { + val.first; + val.second; +} + +fn main() {} diff --git a/src/test/ui/parser/recover-field-extra-angle-brackets.stderr b/src/test/ui/parser/recover-field-extra-angle-brackets.stderr new file mode 100644 index 0000000000000..318e55f6e99ac --- /dev/null +++ b/src/test/ui/parser/recover-field-extra-angle-brackets.stderr @@ -0,0 +1,8 @@ +error: unmatched angle bracket + --> $DIR/recover-field-extra-angle-brackets.rs:5:19 + | +LL | first: Vec>, + | ^ help: remove extra angle bracket + +error: aborting due to previous error + diff --git a/src/test/ui/proc-macro/auxiliary/attr-cfg.rs b/src/test/ui/proc-macro/auxiliary/attr-cfg.rs index f50e18d7be30e..2f0054cc14aa6 100644 --- a/src/test/ui/proc-macro/auxiliary/attr-cfg.rs +++ b/src/test/ui/proc-macro/auxiliary/attr-cfg.rs @@ -11,11 +11,9 @@ use proc_macro::TokenStream; pub fn attr_cfg(args: TokenStream, input: TokenStream) -> TokenStream { let input_str = input.to_string(); - assert_eq!(input_str, "fn outer() -> u8 { - #[cfg(foo)] - fn inner() -> u8 { 1 } - #[cfg(bar)] - fn inner() -> u8 { 2 } + assert_eq!(input_str, "fn outer() -> u8 +{ + #[cfg(foo)] fn inner() -> u8 { 1 } #[cfg(bar)] fn inner() -> u8 { 2 } inner() }"); diff --git a/src/test/ui/proc-macro/auxiliary/attr-stmt-expr-rpass.rs b/src/test/ui/proc-macro/auxiliary/attr-stmt-expr-rpass.rs index f1de3709b166b..e056bd32d2d0e 100644 --- a/src/test/ui/proc-macro/auxiliary/attr-stmt-expr-rpass.rs +++ b/src/test/ui/proc-macro/auxiliary/attr-stmt-expr-rpass.rs @@ -10,14 +10,14 @@ use proc_macro::TokenStream; #[proc_macro_attribute] pub fn expect_let(attr: TokenStream, item: TokenStream) -> 
TokenStream { assert!(attr.to_string().is_empty()); - assert_eq!(item.to_string(), "let string = \"Hello, world!\";"); + assert_eq!(item.to_string(), "let string = \"Hello, world!\" ;"); item } #[proc_macro_attribute] pub fn expect_print_stmt(attr: TokenStream, item: TokenStream) -> TokenStream { assert!(attr.to_string().is_empty()); - assert_eq!(item.to_string(), "println!(\"{}\", string);"); + assert_eq!(item.to_string(), "println ! (\"{}\", string) ;"); item } @@ -31,7 +31,7 @@ pub fn expect_expr(attr: TokenStream, item: TokenStream) -> TokenStream { #[proc_macro_attribute] pub fn expect_print_expr(attr: TokenStream, item: TokenStream) -> TokenStream { assert!(attr.to_string().is_empty()); - assert_eq!(item.to_string(), "println!(\"{}\", string)"); + assert_eq!(item.to_string(), "println ! (\"{}\", string)"); item } diff --git a/src/test/ui/proc-macro/auxiliary/attr-stmt-expr.rs b/src/test/ui/proc-macro/auxiliary/attr-stmt-expr.rs index d2180def5b760..213f999e9d0ea 100644 --- a/src/test/ui/proc-macro/auxiliary/attr-stmt-expr.rs +++ b/src/test/ui/proc-macro/auxiliary/attr-stmt-expr.rs @@ -10,14 +10,14 @@ use proc_macro::TokenStream; #[proc_macro_attribute] pub fn expect_let(attr: TokenStream, item: TokenStream) -> TokenStream { assert!(attr.to_string().is_empty()); - assert_eq!(item.to_string(), "let string = \"Hello, world!\";"); + assert_eq!(item.to_string(), "let string = \"Hello, world!\" ;"); item } #[proc_macro_attribute] pub fn expect_print_stmt(attr: TokenStream, item: TokenStream) -> TokenStream { assert!(attr.to_string().is_empty()); - assert_eq!(item.to_string(), "println!(\"{}\", string);"); + assert_eq!(item.to_string(), "println ! (\"{}\", string) ;"); item } @@ -31,7 +31,7 @@ pub fn expect_expr(attr: TokenStream, item: TokenStream) -> TokenStream { #[proc_macro_attribute] pub fn expect_print_expr(attr: TokenStream, item: TokenStream) -> TokenStream { assert!(attr.to_string().is_empty()); - assert_eq!(item.to_string(), "println!(\"{}\", string)"); + assert_eq!(item.to_string(), "println ! 
(\"{}\", string)"); item } diff --git a/src/test/ui/proc-macro/auxiliary/derive-a.rs b/src/test/ui/proc-macro/auxiliary/derive-a.rs index cd2be5fd84d44..79a3864bf991d 100644 --- a/src/test/ui/proc-macro/auxiliary/derive-a.rs +++ b/src/test/ui/proc-macro/auxiliary/derive-a.rs @@ -10,6 +10,6 @@ use proc_macro::TokenStream; #[proc_macro_derive(A)] pub fn derive(input: TokenStream) -> TokenStream { let input = input.to_string(); - assert!(input.contains("struct A;")); + assert!(input.contains("struct A ;")); "".parse().unwrap() } diff --git a/src/test/ui/proc-macro/auxiliary/derive-atob.rs b/src/test/ui/proc-macro/auxiliary/derive-atob.rs index e78e5bb8f4c75..207b7fd320360 100644 --- a/src/test/ui/proc-macro/auxiliary/derive-atob.rs +++ b/src/test/ui/proc-macro/auxiliary/derive-atob.rs @@ -10,6 +10,6 @@ use proc_macro::TokenStream; #[proc_macro_derive(AToB)] pub fn derive(input: TokenStream) -> TokenStream { let input = input.to_string(); - assert_eq!(input, "struct A;"); + assert_eq!(input, "struct A ;"); "struct B;".parse().unwrap() } diff --git a/src/test/ui/proc-macro/auxiliary/derive-b-rpass.rs b/src/test/ui/proc-macro/auxiliary/derive-b-rpass.rs index 3e6af67a9f412..641a95f78c112 100644 --- a/src/test/ui/proc-macro/auxiliary/derive-b-rpass.rs +++ b/src/test/ui/proc-macro/auxiliary/derive-b-rpass.rs @@ -10,7 +10,7 @@ use proc_macro::TokenStream; #[proc_macro_derive(B, attributes(B, C))] pub fn derive(input: TokenStream) -> TokenStream { let input = input.to_string(); - assert!(input.contains("#[B[arbitrary tokens]]")); + assert!(input.contains("#[B [arbitrary tokens]]")); assert!(input.contains("struct B {")); assert!(input.contains("#[C]")); "".parse().unwrap() diff --git a/src/test/ui/proc-macro/auxiliary/derive-ctod.rs b/src/test/ui/proc-macro/auxiliary/derive-ctod.rs index dbf44ed1b0537..2efe5a9134054 100644 --- a/src/test/ui/proc-macro/auxiliary/derive-ctod.rs +++ b/src/test/ui/proc-macro/auxiliary/derive-ctod.rs @@ -10,6 +10,6 @@ use proc_macro::TokenStream; #[proc_macro_derive(CToD)] pub fn derive(input: TokenStream) -> TokenStream { let input = input.to_string(); - assert_eq!(input, "struct C;"); + assert_eq!(input, "struct C ;"); "struct D;".parse().unwrap() } diff --git a/src/test/ui/proc-macro/auxiliary/derive-same-struct.rs b/src/test/ui/proc-macro/auxiliary/derive-same-struct.rs index ce7a50d2381cd..7598d632cb6d5 100644 --- a/src/test/ui/proc-macro/auxiliary/derive-same-struct.rs +++ b/src/test/ui/proc-macro/auxiliary/derive-same-struct.rs @@ -10,12 +10,12 @@ use proc_macro::TokenStream; #[proc_macro_derive(AToB)] pub fn derive1(input: TokenStream) -> TokenStream { println!("input1: {:?}", input.to_string()); - assert_eq!(input.to_string(), "struct A;"); + assert_eq!(input.to_string(), "struct A ;"); "#[derive(BToC)] struct B;".parse().unwrap() } #[proc_macro_derive(BToC)] pub fn derive2(input: TokenStream) -> TokenStream { - assert_eq!(input.to_string(), "struct B;"); + assert_eq!(input.to_string(), "struct B ;"); "struct C;".parse().unwrap() } diff --git a/src/test/ui/proc-macro/auxiliary/derive-union.rs b/src/test/ui/proc-macro/auxiliary/derive-union.rs index d950e1e773c70..05883170c6c48 100644 --- a/src/test/ui/proc-macro/auxiliary/derive-union.rs +++ b/src/test/ui/proc-macro/auxiliary/derive-union.rs @@ -12,7 +12,7 @@ pub fn derive(input: TokenStream) -> TokenStream { let input = input.to_string(); assert!(input.contains("#[repr(C)]")); assert!(input.contains("union Test {")); - assert!(input.contains("a: u8,")); + assert!(input.contains("a : u8,")); 
assert!(input.contains("}")); "".parse().unwrap() } diff --git a/src/test/ui/proc-macro/auxiliary/double.rs b/src/test/ui/proc-macro/auxiliary/double.rs index 3a2e8d04c36b5..99eb4e3754672 100644 --- a/src/test/ui/proc-macro/auxiliary/double.rs +++ b/src/test/ui/proc-macro/auxiliary/double.rs @@ -1,15 +1,16 @@ // force-host // no-prefer-dynamic +#![feature(proc_macro_quote)] + #![crate_type = "proc-macro"] extern crate proc_macro; - -use proc_macro::TokenStream; +use proc_macro::*; // Outputs another copy of the struct. Useful for testing the tokens // seen by the proc_macro. #[proc_macro_derive(Double)] pub fn derive(input: TokenStream) -> TokenStream { - format!("mod foo {{ {} }}", input.to_string()).parse().unwrap() + quote!(mod foo { $input }) } diff --git a/src/test/ui/proc-macro/auxiliary/expand-with-a-macro.rs b/src/test/ui/proc-macro/auxiliary/expand-with-a-macro.rs index 5155a4b855865..d779d57af14c7 100644 --- a/src/test/ui/proc-macro/auxiliary/expand-with-a-macro.rs +++ b/src/test/ui/proc-macro/auxiliary/expand-with-a-macro.rs @@ -11,7 +11,7 @@ use proc_macro::TokenStream; #[proc_macro_derive(A)] pub fn derive(input: TokenStream) -> TokenStream { let input = input.to_string(); - assert!(input.contains("struct A;")); + assert!(input.contains("struct A ;")); r#" impl A { fn a(&self) { diff --git a/src/test/ui/proc-macro/auxiliary/meta-delim.rs b/src/test/ui/proc-macro/auxiliary/meta-delim.rs new file mode 100644 index 0000000000000..54e3d7857267b --- /dev/null +++ b/src/test/ui/proc-macro/auxiliary/meta-delim.rs @@ -0,0 +1,12 @@ +macro_rules! produce_it { + ($dollar_one:tt $foo:ident $my_name:ident) => { + #[macro_export] + macro_rules! meta_delim { + ($dollar_one ($dollar_one $my_name:ident)*) => { + stringify!($dollar_one ($dollar_one $my_name)*) + } + } + } +} + +produce_it!($my_name name); diff --git a/src/test/ui/proc-macro/auxiliary/nested-macro-rules.rs b/src/test/ui/proc-macro/auxiliary/nested-macro-rules.rs new file mode 100644 index 0000000000000..52ebe8e7fb2ef --- /dev/null +++ b/src/test/ui/proc-macro/auxiliary/nested-macro-rules.rs @@ -0,0 +1,15 @@ +pub struct FirstStruct; + +#[macro_export] +macro_rules! outer_macro { + ($name:ident) => { + #[macro_export] + macro_rules! 
inner_macro { + ($wrapper:ident) => { + $wrapper!($name) + } + } + } +} + +outer_macro!(FirstStruct); diff --git a/src/test/ui/proc-macro/auxiliary/test-macros.rs b/src/test/ui/proc-macro/auxiliary/test-macros.rs index fb8016cd43896..8682ebdd109f0 100644 --- a/src/test/ui/proc-macro/auxiliary/test-macros.rs +++ b/src/test/ui/proc-macro/auxiliary/test-macros.rs @@ -101,6 +101,12 @@ pub fn print_bang(input: TokenStream) -> TokenStream { print_helper(input, "BANG") } +#[proc_macro] +pub fn print_bang_consume(input: TokenStream) -> TokenStream { + print_helper(input, "BANG"); + TokenStream::new() +} + #[proc_macro_attribute] pub fn print_attr(_: TokenStream, input: TokenStream) -> TokenStream { print_helper(input, "ATTR") diff --git a/src/test/ui/proc-macro/derive-same-struct.stdout b/src/test/ui/proc-macro/derive-same-struct.stdout index 77605de5e33fc..7478d9741409b 100644 --- a/src/test/ui/proc-macro/derive-same-struct.stdout +++ b/src/test/ui/proc-macro/derive-same-struct.stdout @@ -1 +1 @@ -input1: "struct A;" +input1: "struct A ;" diff --git a/src/test/ui/proc-macro/dollar-crate-issue-57089.stdout b/src/test/ui/proc-macro/dollar-crate-issue-57089.stdout index 15433bebde967..5d93144b44553 100644 --- a/src/test/ui/proc-macro/dollar-crate-issue-57089.stdout +++ b/src/test/ui/proc-macro/dollar-crate-issue-57089.stdout @@ -38,8 +38,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: #3 bytes(LO..HI), }, ] -PRINT-ATTR INPUT (DISPLAY): struct A(crate::S); -PRINT-ATTR RE-COLLECTED (DISPLAY): struct A($crate :: S) ; +PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S) ; PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "struct", diff --git a/src/test/ui/proc-macro/dollar-crate-issue-62325.stdout b/src/test/ui/proc-macro/dollar-crate-issue-62325.stdout index 73e407918ec8c..e4212377626ca 100644 --- a/src/test/ui/proc-macro/dollar-crate-issue-62325.stdout +++ b/src/test/ui/proc-macro/dollar-crate-issue-62325.stdout @@ -1,5 +1,4 @@ -PRINT-ATTR INPUT (DISPLAY): struct A(identity!(crate :: S)); -PRINT-ATTR RE-COLLECTED (DISPLAY): struct A(identity ! ($crate :: S)) ; +PRINT-ATTR INPUT (DISPLAY): struct A(identity ! ($crate :: S)) ; PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "struct", @@ -54,8 +53,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ span: #3 bytes(LO..HI), }, ] -PRINT-ATTR INPUT (DISPLAY): struct B(identity!(::dollar_crate_external :: S)); -PRINT-ATTR RE-COLLECTED (DISPLAY): struct B(identity ! ($crate :: S)) ; +PRINT-ATTR INPUT (DISPLAY): struct B(identity ! 
($crate :: S)) ; PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "struct", diff --git a/src/test/ui/proc-macro/dollar-crate.stdout b/src/test/ui/proc-macro/dollar-crate.stdout index e125a3e7f1737..8a7406b1a3d13 100644 --- a/src/test/ui/proc-macro/dollar-crate.stdout +++ b/src/test/ui/proc-macro/dollar-crate.stdout @@ -38,8 +38,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: #3 bytes(LO..HI), }, ] -PRINT-ATTR INPUT (DISPLAY): struct A(crate::S); -PRINT-ATTR RE-COLLECTED (DISPLAY): struct A($crate :: S) ; +PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S) ; PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "struct", @@ -79,8 +78,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ span: #3 bytes(LO..HI), }, ] -PRINT-DERIVE INPUT (DISPLAY): struct D(crate::S); -PRINT-DERIVE RE-COLLECTED (DISPLAY): struct D($crate :: S) ; +PRINT-DERIVE INPUT (DISPLAY): struct D($crate :: S) ; PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "struct", @@ -160,8 +158,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: #13 bytes(LO..HI), }, ] -PRINT-ATTR INPUT (DISPLAY): struct A(::dollar_crate_external::S); -PRINT-ATTR RE-COLLECTED (DISPLAY): struct A($crate :: S) ; +PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S) ; PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "struct", @@ -201,8 +198,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ span: #13 bytes(LO..HI), }, ] -PRINT-DERIVE INPUT (DISPLAY): struct D(::dollar_crate_external::S); -PRINT-DERIVE RE-COLLECTED (DISPLAY): struct D($crate :: S) ; +PRINT-DERIVE INPUT (DISPLAY): struct D($crate :: S) ; PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "struct", diff --git a/src/test/ui/proc-macro/input-interpolated.stdout b/src/test/ui/proc-macro/input-interpolated.stdout index 7529db3bd06f8..ee988d48b461d 100644 --- a/src/test/ui/proc-macro/input-interpolated.stdout +++ b/src/test/ui/proc-macro/input-interpolated.stdout @@ -1,5 +1,4 @@ PRINT-BANG INPUT (DISPLAY): A -PRINT-BANG RE-COLLECTED (DISPLAY): A PRINT-BANG INPUT (DEBUG): TokenStream [ Group { delimiter: None, @@ -12,8 +11,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: #3 bytes(269..271), }, ] -PRINT-ATTR INPUT (DISPLAY): const A: u8 = 0; -PRINT-ATTR RE-COLLECTED (DISPLAY): const A : u8 = 0 ; +PRINT-ATTR INPUT (DISPLAY): const A : u8 = 0 ; PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "const", @@ -49,9 +47,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ span: #0 bytes(0..0), }, ] -PRINT-DERIVE INPUT (DISPLAY): struct A { -} -PRINT-DERIVE RE-COLLECTED (DISPLAY): struct A { } +PRINT-DERIVE INPUT (DISPLAY): struct A { } PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "struct", diff --git a/src/test/ui/proc-macro/meta-delim.rs b/src/test/ui/proc-macro/meta-delim.rs new file mode 100644 index 0000000000000..964291bc6784c --- /dev/null +++ b/src/test/ui/proc-macro/meta-delim.rs @@ -0,0 +1,12 @@ +// aux-build:meta-delim.rs +// edition:2018 +// run-pass + +// Tests that we can properly deserialize a macro with strange delimiters +// See https://github.com/rust-lang/rust/pull/73569#issuecomment-650860457 + +extern crate meta_delim; + +fn main() { + assert_eq!("a bunch of idents", meta_delim::meta_delim!(a bunch of idents)); +} diff --git a/src/test/ui/proc-macro/nested-macro-rules.rs b/src/test/ui/proc-macro/nested-macro-rules.rs new file mode 100644 index 0000000000000..2f8ef20232782 --- /dev/null +++ b/src/test/ui/proc-macro/nested-macro-rules.rs @@ -0,0 +1,20 @@ +// run-pass +// aux-build:nested-macro-rules.rs +// aux-build:test-macros.rs +// compile-flags: -Z span-debug 
+// edition:2018 + +extern crate nested_macro_rules; +extern crate test_macros; + +use test_macros::print_bang; + +use nested_macro_rules::FirstStruct; +struct SecondStruct; + +fn main() { + nested_macro_rules::inner_macro!(print_bang); + + nested_macro_rules::outer_macro!(SecondStruct); + inner_macro!(print_bang); +} diff --git a/src/test/ui/proc-macro/nested-macro-rules.stdout b/src/test/ui/proc-macro/nested-macro-rules.stdout new file mode 100644 index 0000000000000..e4cfe020324b8 --- /dev/null +++ b/src/test/ui/proc-macro/nested-macro-rules.stdout @@ -0,0 +1,26 @@ +PRINT-BANG INPUT (DISPLAY): FirstStruct +PRINT-BANG INPUT (DEBUG): TokenStream [ + Group { + delimiter: None, + stream: TokenStream [ + Ident { + ident: "FirstStruct", + span: $DIR/auxiliary/nested-macro-rules.rs:15:14: 15:25 (#3), + }, + ], + span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#3), + }, +] +PRINT-BANG INPUT (DISPLAY): SecondStruct +PRINT-BANG INPUT (DEBUG): TokenStream [ + Group { + delimiter: None, + stream: TokenStream [ + Ident { + ident: "SecondStruct", + span: $DIR/nested-macro-rules.rs:18:38: 18:50 (#9), + }, + ], + span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#8), + }, +] diff --git a/src/test/ui/proc-macro/nodelim-groups.rs b/src/test/ui/proc-macro/nodelim-groups.rs new file mode 100644 index 0000000000000..cfcd4c0d2a658 --- /dev/null +++ b/src/test/ui/proc-macro/nodelim-groups.rs @@ -0,0 +1,19 @@ +// run-pass +// aux-build:test-macros.rs +// compile-flags: -Z span-debug +// edition:2018 +// +// Tests the pretty-printing behavior of inserting `NoDelim` groups + +extern crate test_macros; +use test_macros::print_bang_consume; + +macro_rules! expand_it { + (($val1:expr) ($val2:expr)) => { expand_it!($val1 + $val2) }; + ($val:expr) => { print_bang_consume!("hi" $val (1 + 1)) }; +} + +fn main() { + expand_it!(1 + (25) + 1); + expand_it!(("hello".len()) ("world".len())); +} diff --git a/src/test/ui/proc-macro/nodelim-groups.stdout b/src/test/ui/proc-macro/nodelim-groups.stdout new file mode 100644 index 0000000000000..75a189a9fcdea --- /dev/null +++ b/src/test/ui/proc-macro/nodelim-groups.stdout @@ -0,0 +1,156 @@ +PRINT-BANG INPUT (DISPLAY): "hi" 1 + (25) + 1 (1 + 1) +PRINT-BANG INPUT (DEBUG): TokenStream [ + Literal { + kind: Str, + symbol: "hi", + suffix: None, + span: $DIR/nodelim-groups.rs:13:42: 13:46 (#3), + }, + Group { + delimiter: None, + stream: TokenStream [ + Literal { + kind: Integer, + symbol: "1", + suffix: None, + span: $DIR/nodelim-groups.rs:17:16: 17:17 (#0), + }, + Punct { + ch: '+', + spacing: Alone, + span: $DIR/nodelim-groups.rs:17:18: 17:19 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Literal { + kind: Integer, + symbol: "25", + suffix: None, + span: $DIR/nodelim-groups.rs:17:21: 17:23 (#0), + }, + ], + span: $DIR/nodelim-groups.rs:17:20: 17:24 (#0), + }, + Punct { + ch: '+', + spacing: Alone, + span: $DIR/nodelim-groups.rs:17:25: 17:26 (#0), + }, + Literal { + kind: Integer, + symbol: "1", + suffix: None, + span: $DIR/nodelim-groups.rs:17:27: 17:28 (#0), + }, + ], + span: $DIR/nodelim-groups.rs:13:47: 13:51 (#3), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Literal { + kind: Integer, + symbol: "1", + suffix: None, + span: $DIR/nodelim-groups.rs:13:53: 13:54 (#3), + }, + Punct { + ch: '+', + spacing: Alone, + span: $DIR/nodelim-groups.rs:13:55: 13:56 (#3), + }, + Literal { + kind: Integer, + symbol: "1", + suffix: None, + span: $DIR/nodelim-groups.rs:13:57: 13:58 (#3), + }, + ], + span: $DIR/nodelim-groups.rs:13:52: 
13:59 (#3), + }, +] +PRINT-BANG INPUT (DISPLAY): "hi" "hello".len() + "world".len() (1 + 1) +PRINT-BANG RE-COLLECTED (DISPLAY): "hi" "hello" . len() + "world" . len() (1 + 1) +PRINT-BANG INPUT (DEBUG): TokenStream [ + Literal { + kind: Str, + symbol: "hi", + suffix: None, + span: $DIR/nodelim-groups.rs:13:42: 13:46 (#8), + }, + Group { + delimiter: None, + stream: TokenStream [ + Literal { + kind: Str, + symbol: "hello", + suffix: None, + span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8), + }, + Punct { + ch: '.', + spacing: Alone, + span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8), + }, + Ident { + ident: "len", + span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [], + span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8), + }, + Punct { + ch: '+', + spacing: Alone, + span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8), + }, + Literal { + kind: Str, + symbol: "world", + suffix: None, + span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8), + }, + Punct { + ch: '.', + spacing: Alone, + span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8), + }, + Ident { + ident: "len", + span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [], + span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8), + }, + ], + span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Literal { + kind: Integer, + symbol: "1", + suffix: None, + span: $DIR/nodelim-groups.rs:13:53: 13:54 (#8), + }, + Punct { + ch: '+', + spacing: Alone, + span: $DIR/nodelim-groups.rs:13:55: 13:56 (#8), + }, + Literal { + kind: Integer, + symbol: "1", + suffix: None, + span: $DIR/nodelim-groups.rs:13:57: 13:58 (#8), + }, + ], + span: $DIR/nodelim-groups.rs:13:52: 13:59 (#8), + }, +] diff --git a/src/test/ui/rfc-2565-param-attrs/auxiliary/param-attrs.rs b/src/test/ui/rfc-2565-param-attrs/auxiliary/param-attrs.rs index c537c1034b5a6..2a172c8458d71 100644 --- a/src/test/ui/rfc-2565-param-attrs/auxiliary/param-attrs.rs +++ b/src/test/ui/rfc-2565-param-attrs/auxiliary/param-attrs.rs @@ -17,28 +17,27 @@ macro_rules! checker { } } -checker!(attr_extern, r#"extern "C" { - fn ffi(#[a1] arg1: i32, #[a2] ...); -}"#); -checker!(attr_extern_cvar, r#"unsafe extern "C" fn cvar(arg1: i32, #[a1] mut args: ...) 
{ }"#); -checker!(attr_alias, "type Alias = fn(#[a1] u8, #[a2] ...);"); -checker!(attr_free, "fn free(#[a1] arg1: u8) { let lam = |#[a2] W(x), #[a3] y| (); }"); -checker!(attr_inherent_1, "fn inherent1(#[a1] self, #[a2] arg1: u8) { }"); -checker!(attr_inherent_2, "fn inherent2(#[a1] &self, #[a2] arg1: u8) { }"); -checker!(attr_inherent_3, "fn inherent3<'a>(#[a1] &'a mut self, #[a2] arg1: u8) { }"); -checker!(attr_inherent_4, "fn inherent4<'a>(#[a1] self: Box, #[a2] arg1: u8) { }"); -checker!(attr_inherent_issue_64682, "fn inherent5(#[a1] #[a2] arg1: u8, #[a3] arg2: u8) { }"); -checker!(attr_trait_1, "fn trait1(#[a1] self, #[a2] arg1: u8);"); -checker!(attr_trait_2, "fn trait2(#[a1] &self, #[a2] arg1: u8);"); -checker!(attr_trait_3, "fn trait3<'a>(#[a1] &'a mut self, #[a2] arg1: u8);"); -checker!(attr_trait_4, "fn trait4<'a>(#[a1] self: Box, #[a2] arg1: u8, #[a3] Vec);"); -checker!(attr_trait_issue_64682, "fn trait5(#[a1] #[a2] arg1: u8, #[a3] arg2: u8);"); -checker!(rename_params, r#"impl Foo { - fn hello(#[angery(true)] a: i32, #[a2] b: i32, #[what = "how"] c: u32) { } - fn hello2(#[a1] #[a2] a: i32, #[what = "how"] b: i32, - #[angery(true)] c: u32) { - } - fn hello_self(#[a1] #[a2] &self, #[a1] #[a2] a: i32, - #[what = "how"] b: i32, #[angery(true)] c: u32) { - } +checker!(attr_extern, r#"extern "C" { fn ffi(#[a1] arg1 : i32, #[a2] ...) ; }"#); +checker!(attr_extern_cvar, r#"unsafe extern "C" fn cvar(arg1 : i32, #[a1] mut args : ...) { }"#); +checker!(attr_alias, "type Alias = fn(#[a1] u8, #[a2] ...) ;"); +checker!(attr_free, "fn free(#[a1] arg1 : u8) { let lam = | #[a2] W(x), #[a3] y | () ; }"); +checker!(attr_inherent_1, "fn inherent1(#[a1] self, #[a2] arg1 : u8) { }"); +checker!(attr_inherent_2, "fn inherent2(#[a1] & self, #[a2] arg1 : u8) { }"); +checker!(attr_inherent_3, "fn inherent3 < 'a > (#[a1] & 'a mut self, #[a2] arg1 : u8) { }"); +checker!(attr_inherent_4, "fn inherent4 < 'a > (#[a1] self : Box < Self >, #[a2] arg1 : u8) { }"); +checker!(attr_inherent_issue_64682, "fn inherent5(#[a1] #[a2] arg1 : u8, #[a3] arg2 : u8) { }"); +checker!(attr_trait_1, "fn trait1(#[a1] self, #[a2] arg1 : u8) ;"); +checker!(attr_trait_2, "fn trait2(#[a1] & self, #[a2] arg1 : u8) ;"); +checker!(attr_trait_3, "fn trait3 < 'a > (#[a1] & 'a mut self, #[a2] arg1 : u8) ;"); +checker!(attr_trait_4, r#"fn trait4 < 'a > +(#[a1] self : Box < Self >, #[a2] arg1 : u8, #[a3] Vec < u8 >) ;"#); +checker!(attr_trait_issue_64682, "fn trait5(#[a1] #[a2] arg1 : u8, #[a3] arg2 : u8) ;"); +checker!(rename_params, r#"impl Foo +{ + fn hello(#[angery(true)] a : i32, #[a2] b : i32, #[what = "how"] c : u32) + { } fn + hello2(#[a1] #[a2] a : i32, #[what = "how"] b : i32, #[angery(true)] c : + u32) { } fn + hello_self(#[a1] #[a2] & self, #[a1] #[a2] a : i32, #[what = "how"] b : + i32, #[angery(true)] c : u32) { } }"#); diff --git a/src/test/ui/rfcs/rfc-2396-target_feature-11/fn-traits.rs b/src/test/ui/rfcs/rfc-2396-target_feature-11/fn-traits.rs new file mode 100644 index 0000000000000..5c838fd719cd9 --- /dev/null +++ b/src/test/ui/rfcs/rfc-2396-target_feature-11/fn-traits.rs @@ -0,0 +1,34 @@ +// only-x86_64 + +#![feature(target_feature_11)] + +#[target_feature(enable = "avx")] +fn foo() {} + +#[target_feature(enable = "avx")] +unsafe fn foo_unsafe() {} + +fn call(f: impl Fn()) { + f() +} + +fn call_mut(f: impl FnMut()) { + f() +} + +fn call_once(f: impl FnOnce()) { + f() +} + +fn main() { + call(foo); //~ ERROR expected a `std::ops::Fn<()>` closure, found `fn() {foo}` + call_mut(foo); //~ ERROR expected a 
`std::ops::FnMut<()>` closure, found `fn() {foo}` + call_once(foo); //~ ERROR expected a `std::ops::FnOnce<()>` closure, found `fn() {foo}` + + call(foo_unsafe); + //~^ ERROR expected a `std::ops::Fn<()>` closure, found `unsafe fn() {foo_unsafe}` + call_mut(foo_unsafe); + //~^ ERROR expected a `std::ops::FnMut<()>` closure, found `unsafe fn() {foo_unsafe}` + call_once(foo_unsafe); + //~^ ERROR expected a `std::ops::FnOnce<()>` closure, found `unsafe fn() {foo_unsafe}` +} diff --git a/src/test/ui/rfcs/rfc-2396-target_feature-11/fn-traits.stderr b/src/test/ui/rfcs/rfc-2396-target_feature-11/fn-traits.stderr new file mode 100644 index 0000000000000..448077b439e80 --- /dev/null +++ b/src/test/ui/rfcs/rfc-2396-target_feature-11/fn-traits.stderr @@ -0,0 +1,81 @@ +error[E0277]: expected a `std::ops::Fn<()>` closure, found `fn() {foo}` + --> $DIR/fn-traits.rs:24:10 + | +LL | fn call(f: impl Fn()) { + | ---- required by this bound in `call` +... +LL | call(foo); + | ^^^ expected an `Fn<()>` closure, found `fn() {foo}` + | + = help: the trait `std::ops::Fn<()>` is not implemented for `fn() {foo}` + = note: wrap the `fn() {foo}` in a closure with no arguments: `|| { /* code */ } + = note: `#[target_feature]` functions do not implement the `Fn` traits + +error[E0277]: expected a `std::ops::FnMut<()>` closure, found `fn() {foo}` + --> $DIR/fn-traits.rs:25:14 + | +LL | fn call_mut(f: impl FnMut()) { + | ------- required by this bound in `call_mut` +... +LL | call_mut(foo); + | ^^^ expected an `FnMut<()>` closure, found `fn() {foo}` + | + = help: the trait `std::ops::FnMut<()>` is not implemented for `fn() {foo}` + = note: wrap the `fn() {foo}` in a closure with no arguments: `|| { /* code */ } + = note: `#[target_feature]` functions do not implement the `Fn` traits + +error[E0277]: expected a `std::ops::FnOnce<()>` closure, found `fn() {foo}` + --> $DIR/fn-traits.rs:26:15 + | +LL | fn call_once(f: impl FnOnce()) { + | -------- required by this bound in `call_once` +... +LL | call_once(foo); + | ^^^ expected an `FnOnce<()>` closure, found `fn() {foo}` + | + = help: the trait `std::ops::FnOnce<()>` is not implemented for `fn() {foo}` + = note: wrap the `fn() {foo}` in a closure with no arguments: `|| { /* code */ } + = note: `#[target_feature]` functions do not implement the `Fn` traits + +error[E0277]: expected a `std::ops::Fn<()>` closure, found `unsafe fn() {foo_unsafe}` + --> $DIR/fn-traits.rs:28:10 + | +LL | fn call(f: impl Fn()) { + | ---- required by this bound in `call` +... +LL | call(foo_unsafe); + | ^^^^^^^^^^ expected an `Fn<()>` closure, found `unsafe fn() {foo_unsafe}` + | + = help: the trait `std::ops::Fn<()>` is not implemented for `unsafe fn() {foo_unsafe}` + = note: wrap the `unsafe fn() {foo_unsafe}` in a closure with no arguments: `|| { /* code */ } + = note: `#[target_feature]` functions do not implement the `Fn` traits + +error[E0277]: expected a `std::ops::FnMut<()>` closure, found `unsafe fn() {foo_unsafe}` + --> $DIR/fn-traits.rs:30:14 + | +LL | fn call_mut(f: impl FnMut()) { + | ------- required by this bound in `call_mut` +... 
+LL | call_mut(foo_unsafe); + | ^^^^^^^^^^ expected an `FnMut<()>` closure, found `unsafe fn() {foo_unsafe}` + | + = help: the trait `std::ops::FnMut<()>` is not implemented for `unsafe fn() {foo_unsafe}` + = note: wrap the `unsafe fn() {foo_unsafe}` in a closure with no arguments: `|| { /* code */ } + = note: `#[target_feature]` functions do not implement the `Fn` traits + +error[E0277]: expected a `std::ops::FnOnce<()>` closure, found `unsafe fn() {foo_unsafe}` + --> $DIR/fn-traits.rs:32:15 + | +LL | fn call_once(f: impl FnOnce()) { + | -------- required by this bound in `call_once` +... +LL | call_once(foo_unsafe); + | ^^^^^^^^^^ expected an `FnOnce<()>` closure, found `unsafe fn() {foo_unsafe}` + | + = help: the trait `std::ops::FnOnce<()>` is not implemented for `unsafe fn() {foo_unsafe}` + = note: wrap the `unsafe fn() {foo_unsafe}` in a closure with no arguments: `|| { /* code */ } + = note: `#[target_feature]` functions do not implement the `Fn` traits + +error: aborting due to 6 previous errors + +For more information about this error, try `rustc --explain E0277`. diff --git a/src/test/ui/unsafe/ranged_ints_macro.rs b/src/test/ui/unsafe/ranged_ints_macro.rs new file mode 100644 index 0000000000000..9192ecfe196cb --- /dev/null +++ b/src/test/ui/unsafe/ranged_ints_macro.rs @@ -0,0 +1,16 @@ +// build-pass +#![feature(rustc_attrs)] + +macro_rules! apply { + ($val:expr) => { + #[rustc_layout_scalar_valid_range_start($val)] + #[repr(transparent)] + pub(crate) struct NonZero(pub(crate) T); + } +} + +apply!(1); + +fn main() { + let _x = unsafe { NonZero(1) }; +} diff --git a/triagebot.toml b/triagebot.toml index 73ca7abfed363..51a29553fdb3d 100644 --- a/triagebot.toml +++ b/triagebot.toml @@ -92,7 +92,7 @@ message_on_remove = "Issue #{number}'s prioritization request has been removed." [notify-zulip."I-nominated"] required_labels = ["T-compiler"] zulip_stream = 245100 # #t-compiler/wg-prioritization/alerts -topic = "I-prioritize #{number} {title}" +topic = "I-nominated #{number} {title}" message_on_add = """\ @*WG-prioritization/alerts* #{number} has been nominated for discussion in `T-compiler` meeting.